Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs                           3640
-rw-r--r--  compiler/rustc_parse/src/lexer/diagnostics.rs                 154
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                        1145
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs                  258
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs    297
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs               391
-rw-r--r--  compiler/rustc_parse/src/lib.rs                               246
-rw-r--r--  compiler/rustc_parse/src/parser/asm.rs                        385
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                       512
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs               407
-rw-r--r--  compiler/rustc_parse/src/parser/cfg_select.rs                  75
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs               3108
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                      4234
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs                   578
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                      3272
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                       1665
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs                204
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                       1759
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs                      1008
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                      1068
-rw-r--r--  compiler/rustc_parse/src/parser/tests.rs                     2935
-rw-r--r--  compiler/rustc_parse/src/parser/token_type.rs                 624
-rw-r--r--  compiler/rustc_parse/src/parser/tokenstream/tests.rs          114
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                        1470
-rw-r--r--  compiler/rustc_parse/src/validate_attr.rs                     343
25 files changed, 29892 insertions, 0 deletions
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
new file mode 100644
index 00000000000..48ff0394d46
--- /dev/null
+++ b/compiler/rustc_parse/src/errors.rs
@@ -0,0 +1,3640 @@
+// ignore-tidy-filelength
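+
+//! Error and subdiagnostic types for the parser, defined almost entirely
+//! through the `Diagnostic`/`Subdiagnostic` derives and Fluent message slugs.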
+
+use std::borrow::Cow;
+
+use rustc_ast::token::Token;
+use rustc_ast::util::parser::ExprPrecedence;
+use rustc_ast::{Path, Visibility};
+use rustc_errors::codes::*;
+use rustc_errors::{
+    Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, Subdiagnostic,
+};
+use rustc_macros::{Diagnostic, Subdiagnostic};
+use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_span::edition::{Edition, LATEST_STABLE_EDITION};
+use rustc_span::{Ident, Span, Symbol};
+
+use crate::fluent_generated as fluent;
+use crate::parser::{ForbiddenLetReason, TokenDescription};
+
+#[derive(Diagnostic)]
+#[diag(parse_maybe_report_ambiguous_plus)]
+pub(crate) struct AmbiguousPlus {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub suggestion: AddParen,
+}
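+
+// Illustrative usage (a sketch, not prescribed by this file): parser code
+// typically emits these derived types through the diagnostic context, e.g.
+// `dcx.emit_err(AmbiguousPlus { span, suggestion: AddParen { lo, hi } })`;
+// the `#[diag(...)]` slug names the Fluent message that supplies the text.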
+
+#[derive(Diagnostic)]
+#[diag(parse_maybe_recover_from_bad_type_plus, code = E0178)]
+pub(crate) struct BadTypePlus {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: BadTypePlusSub,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_add_paren, applicability = "machine-applicable")]
+pub(crate) struct AddParen {
+    #[suggestion_part(code = "(")]
+    pub lo: Span,
+    #[suggestion_part(code = ")")]
+    pub hi: Span,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum BadTypePlusSub {
+    AddParen {
+        #[subdiagnostic]
+        suggestion: AddParen,
+    },
+    #[label(parse_forgot_paren)]
+    ForgotParen {
+        #[primary_span]
+        span: Span,
+    },
+    #[label(parse_expect_path)]
+    ExpectPath {
+        #[primary_span]
+        span: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_maybe_recover_from_bad_qpath_stage_2)]
+pub(crate) struct BadQPathStage2 {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub wrap: WrapType,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WrapType {
+    #[suggestion_part(code = "<")]
+    pub lo: Span,
+    #[suggestion_part(code = ">")]
+    pub hi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_semicolon)]
+pub(crate) struct IncorrectSemicolon<'a> {
+    #[primary_span]
+    #[suggestion(style = "verbose", code = "", applicability = "machine-applicable")]
+    pub span: Span,
+    #[help]
+    pub show_help: bool,
+    pub name: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_use_of_await)]
+pub(crate) struct IncorrectUseOfAwait {
+    #[primary_span]
+    #[suggestion(
+        parse_parentheses_suggestion,
+        style = "verbose",
+        code = "",
+        applicability = "machine-applicable"
+    )]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_use_of_use)]
+pub(crate) struct IncorrectUseOfUse {
+    #[primary_span]
+    #[suggestion(
+        parse_parentheses_suggestion,
+        style = "verbose",
+        code = "",
+        applicability = "machine-applicable"
+    )]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_incorrect_use_of_await_postfix_suggestion,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct AwaitSuggestion {
+    #[suggestion_part(code = "")]
+    pub removal: Span,
+    #[suggestion_part(code = ".await{question_mark}")]
+    pub dot_await: Span,
+    pub question_mark: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_use_of_await)]
+pub(crate) struct IncorrectAwait {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub suggestion: AwaitSuggestion,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_in_in_typo)]
+pub(crate) struct InInTypo {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", style = "verbose", applicability = "machine-applicable")]
+    pub sugg_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_variable_declaration)]
+pub(crate) struct InvalidVariableDeclaration {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: InvalidVariableDeclarationSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum InvalidVariableDeclarationSub {
+    #[suggestion(
+        parse_switch_mut_let_order,
+        style = "verbose",
+        applicability = "maybe-incorrect",
+        code = "let mut"
+    )]
+    SwitchMutLetOrder(#[primary_span] Span),
+    #[suggestion(
+        parse_missing_let_before_mut,
+        applicability = "machine-applicable",
+        style = "verbose",
+        code = "let mut"
+    )]
+    MissingLet(#[primary_span] Span),
+    #[suggestion(
+        parse_use_let_not_auto,
+        style = "verbose",
+        applicability = "machine-applicable",
+        code = "let"
+    )]
+    UseLetNotAuto(#[primary_span] Span),
+    #[suggestion(
+        parse_use_let_not_var,
+        style = "verbose",
+        applicability = "machine-applicable",
+        code = "let"
+    )]
+    UseLetNotVar(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_switch_ref_box_order)]
+pub(crate) struct SwitchRefBoxOrder {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", style = "verbose", code = "box ref")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_comparison_operator)]
+pub(crate) struct InvalidComparisonOperator {
+    #[primary_span]
+    pub span: Span,
+    pub invalid: String,
+    #[subdiagnostic]
+    pub sub: InvalidComparisonOperatorSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum InvalidComparisonOperatorSub {
+    #[suggestion(
+        parse_use_instead,
+        style = "verbose",
+        applicability = "machine-applicable",
+        code = "{correct}"
+    )]
+    Correctable {
+        #[primary_span]
+        span: Span,
+        invalid: String,
+        correct: String,
+    },
+    #[label(parse_spaceship_operator_invalid)]
+    Spaceship(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_logical_operator)]
+#[note]
+pub(crate) struct InvalidLogicalOperator {
+    #[primary_span]
+    pub span: Span,
+    pub incorrect: String,
+    #[subdiagnostic]
+    pub sub: InvalidLogicalOperatorSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum InvalidLogicalOperatorSub {
+    #[suggestion(
+        parse_use_amp_amp_for_conjunction,
+        style = "verbose",
+        applicability = "machine-applicable",
+        code = "&&"
+    )]
+    Conjunction(#[primary_span] Span),
+    #[suggestion(
+        parse_use_pipe_pipe_for_disjunction,
+        style = "verbose",
+        applicability = "machine-applicable",
+        code = "||"
+    )]
+    Disjunction(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_tilde_is_not_unary_operator)]
+pub(crate) struct TildeAsUnaryOperator(
+    #[primary_span]
+    #[suggestion(style = "verbose", applicability = "machine-applicable", code = "!")]
+    pub Span,
+);
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_token_after_not)]
+pub(crate) struct NotAsNegationOperator {
+    #[primary_span]
+    pub negated: Span,
+    pub negated_desc: String,
+    #[subdiagnostic]
+    pub sub: NotAsNegationOperatorSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum NotAsNegationOperatorSub {
+    #[suggestion(
+        parse_unexpected_token_after_not_default,
+        style = "verbose",
+        applicability = "machine-applicable",
+        code = "!"
+    )]
+    SuggestNotDefault(#[primary_span] Span),
+
+    #[suggestion(
+        parse_unexpected_token_after_not_bitwise,
+        style = "verbose",
+        applicability = "machine-applicable",
+        code = "!"
+    )]
+    SuggestNotBitwise(#[primary_span] Span),
+
+    #[suggestion(
+        parse_unexpected_token_after_not_logical,
+        style = "verbose",
+        applicability = "machine-applicable",
+        code = "!"
+    )]
+    SuggestNotLogical(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_malformed_loop_label)]
+pub(crate) struct MalformedLoopLabel {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(applicability = "machine-applicable", code = "'", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_lifetime_in_borrow_expression)]
+pub(crate) struct LifetimeInBorrowExpression {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(applicability = "machine-applicable", code = "", style = "verbose")]
+    #[label]
+    pub lifetime_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_field_expression_with_generic)]
+pub(crate) struct FieldExpressionWithGeneric(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_invocation_with_qualified_path)]
+pub(crate) struct MacroInvocationWithQualifiedPath(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_token_after_label)]
+pub(crate) struct UnexpectedTokenAfterLabel {
+    #[primary_span]
+    #[label(parse_unexpected_token_after_label)]
+    pub span: Span,
+    #[suggestion(parse_suggestion_remove_label, style = "verbose", code = "")]
+    pub remove_label: Option<Span>,
+    #[subdiagnostic]
+    pub enclose_in_block: Option<UnexpectedTokenAfterLabelSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion_enclose_in_block, applicability = "machine-applicable")]
+pub(crate) struct UnexpectedTokenAfterLabelSugg {
+    #[suggestion_part(code = "{{ ")]
+    pub left: Span,
+    #[suggestion_part(code = " }}")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_require_colon_after_labeled_expression)]
+#[note]
+pub(crate) struct RequireColonAfterLabeledExpression {
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub label: Span,
+    #[suggestion(style = "verbose", applicability = "machine-applicable", code = ": ")]
+    pub label_end: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_do_catch_syntax_removed)]
+#[note]
+pub(crate) struct DoCatchSyntaxRemoved {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "try", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_float_literal_requires_integer_part)]
+pub(crate) struct FloatLiteralRequiresIntegerPart {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(applicability = "machine-applicable", code = "0", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_semicolon_before_array)]
+pub(crate) struct MissingSemicolonBeforeArray {
+    #[primary_span]
+    pub open_delim: Span,
+    #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = ";")]
+    pub semicolon: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expect_dotdot_not_dotdotdot)]
+pub(crate) struct MissingDotDot {
+    #[primary_span]
+    pub token_span: Span,
+    #[suggestion(applicability = "maybe-incorrect", code = "..", style = "verbose")]
+    pub sugg_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_block_macro_segment)]
+pub(crate) struct InvalidBlockMacroSegment {
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub context: Span,
+    #[subdiagnostic]
+    pub wrap: WrapInExplicitBlock,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WrapInExplicitBlock {
+    #[suggestion_part(code = "{{ ")]
+    pub lo: Span,
+    #[suggestion_part(code = " }}")]
+    pub hi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_if_expression_missing_then_block)]
+pub(crate) struct IfExpressionMissingThenBlock {
+    #[primary_span]
+    pub if_span: Span,
+    #[subdiagnostic]
+    pub missing_then_block_sub: IfExpressionMissingThenBlockSub,
+    #[subdiagnostic]
+    pub let_else_sub: Option<IfExpressionLetSomeSub>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum IfExpressionMissingThenBlockSub {
+    #[help(parse_condition_possibly_unfinished)]
+    UnfinishedCondition(#[primary_span] Span),
+    #[help(parse_add_then_block)]
+    AddThenBlock(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_ternary_operator)]
+pub(crate) struct TernaryOperator {
+    #[primary_span]
+    pub span: Span,
+    /// If we have a span for the condition expression, suggest the if/else
+    #[subdiagnostic]
+    pub sugg: Option<TernaryOperatorSuggestion>,
+    /// Otherwise, just print the suggestion message
+    #[help(parse_use_if_else)]
+    pub no_sugg: bool,
+}
+
+#[derive(Subdiagnostic, Copy, Clone)]
+#[multipart_suggestion(parse_use_if_else, applicability = "maybe-incorrect", style = "verbose")]
+pub(crate) struct TernaryOperatorSuggestion {
+    #[suggestion_part(code = "if ")]
+    pub before_cond: Span,
+    #[suggestion_part(code = "{{")]
+    pub question: Span,
+    #[suggestion_part(code = "}} else {{")]
+    pub colon: Span,
+    #[suggestion_part(code = " }}")]
+    pub end: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+    parse_extra_if_in_let_else,
+    applicability = "maybe-incorrect",
+    code = "",
+    style = "verbose"
+)]
+pub(crate) struct IfExpressionLetSomeSub {
+    #[primary_span]
+    pub if_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_if_expression_missing_condition)]
+pub(crate) struct IfExpressionMissingCondition {
+    #[primary_span]
+    #[label(parse_condition_label)]
+    pub if_span: Span,
+    #[label(parse_block_label)]
+    pub block_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_expression_found_let)]
+#[note]
+pub(crate) struct ExpectedExpressionFoundLet {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub reason: ForbiddenLetReason,
+    #[subdiagnostic]
+    pub missing_let: Option<MaybeMissingLet>,
+    #[subdiagnostic]
+    pub comparison: Option<MaybeComparison>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_or_in_let_chain)]
+pub(crate) struct OrInLetChain {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic, Clone, Copy)]
+#[multipart_suggestion(
+    parse_maybe_missing_let,
+    applicability = "maybe-incorrect",
+    style = "verbose"
+)]
+pub(crate) struct MaybeMissingLet {
+    #[suggestion_part(code = "let ")]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic, Clone, Copy)]
+#[multipart_suggestion(
+    parse_maybe_comparison,
+    applicability = "maybe-incorrect",
+    style = "verbose"
+)]
+pub(crate) struct MaybeComparison {
+    #[suggestion_part(code = "=")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expect_eq_instead_of_eqeq)]
+pub(crate) struct ExpectedEqForLetExpr {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(applicability = "maybe-incorrect", code = "=", style = "verbose")]
+    pub sugg_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_else_block)]
+pub(crate) struct ExpectedElseBlock {
+    #[primary_span]
+    pub first_tok_span: Span,
+    pub first_tok: String,
+    #[label]
+    pub else_span: Span,
+    #[suggestion(applicability = "maybe-incorrect", code = "if ", style = "verbose")]
+    pub condition_start: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_struct_field)]
+pub(crate) struct ExpectedStructField {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub token: Token,
+    #[label(parse_ident_label)]
+    pub ident_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_outer_attribute_not_allowed_on_if_else)]
+pub(crate) struct OuterAttributeNotAllowedOnIfElse {
+    #[primary_span]
+    pub last: Span,
+
+    #[label(parse_branch_label)]
+    pub branch_span: Span,
+
+    #[label(parse_ctx_label)]
+    pub ctx_span: Span,
+    pub ctx: String,
+
+    #[suggestion(applicability = "machine-applicable", code = "", style = "verbose")]
+    pub attributes: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_in_in_for_loop)]
+pub(crate) struct MissingInInForLoop {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: MissingInInForLoopSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum MissingInInForLoopSub {
+    // Has been misleading, at least in the past (closed Issue #48492), thus maybe-incorrect
+    #[suggestion(
+        parse_use_in_not_of,
+        style = "verbose",
+        applicability = "maybe-incorrect",
+        code = "in"
+    )]
+    InNotOf(#[primary_span] Span),
+    #[suggestion(parse_add_in, style = "verbose", applicability = "maybe-incorrect", code = " in ")]
+    AddIn(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_expression_in_for_loop)]
+pub(crate) struct MissingExpressionInForLoop {
+    #[primary_span]
+    #[suggestion(
+        code = "/* expression */ ",
+        applicability = "has-placeholders",
+        style = "verbose"
+    )]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_loop_else)]
+#[note]
+pub(crate) struct LoopElseNotSupported {
+    #[primary_span]
+    pub span: Span,
+    pub loop_kind: &'static str,
+    #[label(parse_loop_keyword)]
+    pub loop_kw: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_comma_after_match_arm)]
+pub(crate) struct MissingCommaAfterMatchArm {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = ",", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_catch_after_try)]
+#[help]
+pub(crate) struct CatchAfterTry {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_comma_after_base_struct)]
+#[note]
+pub(crate) struct CommaAfterBaseStruct {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(style = "verbose", applicability = "machine-applicable", code = "")]
+    pub comma: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_eq_field_init)]
+pub(crate) struct EqFieldInit {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(applicability = "machine-applicable", code = ":", style = "verbose")]
+    pub eq: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dotdotdot)]
+pub(crate) struct DotDotDot {
+    #[primary_span]
+    #[suggestion(
+        parse_suggest_exclusive_range,
+        applicability = "maybe-incorrect",
+        code = "..",
+        style = "verbose"
+    )]
+    #[suggestion(
+        parse_suggest_inclusive_range,
+        applicability = "maybe-incorrect",
+        code = "..=",
+        style = "verbose"
+    )]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_left_arrow_operator)]
+pub(crate) struct LeftArrowOperator {
+    #[primary_span]
+    #[suggestion(applicability = "maybe-incorrect", code = "< -", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_remove_let)]
+pub(crate) struct RemoveLet {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(applicability = "machine-applicable", code = "", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_use_eq_instead)]
+pub(crate) struct UseEqInstead {
+    #[primary_span]
+    #[suggestion(style = "verbose", applicability = "machine-applicable", code = "=")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_use_empty_block_not_semi)]
+pub(crate) struct UseEmptyBlockNotSemi {
+    #[primary_span]
+    #[suggestion(style = "hidden", applicability = "machine-applicable", code = "{{}}")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_comparison_interpreted_as_generic)]
+pub(crate) struct ComparisonInterpretedAsGeneric {
+    #[primary_span]
+    #[label(parse_label_comparison)]
+    pub comparison: Span,
+    pub r#type: Path,
+    #[label(parse_label_args)]
+    pub args: Span,
+    #[subdiagnostic]
+    pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_shift_interpreted_as_generic)]
+pub(crate) struct ShiftInterpretedAsGeneric {
+    #[primary_span]
+    #[label(parse_label_comparison)]
+    pub shift: Span,
+    pub r#type: Path,
+    #[label(parse_label_args)]
+    pub args: Span,
+    #[subdiagnostic]
+    pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ComparisonOrShiftInterpretedAsGenericSugg {
+    #[suggestion_part(code = "(")]
+    pub left: Span,
+    #[suggestion_part(code = ")")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_found_expr_would_be_stmt)]
+pub(crate) struct FoundExprWouldBeStmt {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub token: Token,
+    #[subdiagnostic]
+    pub suggestion: ExprParenthesesNeeded,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_frontmatter_extra_characters_after_close)]
+pub(crate) struct FrontmatterExtraCharactersAfterClose {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_frontmatter_invalid_infostring)]
+#[note]
+pub(crate) struct FrontmatterInvalidInfostring {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_frontmatter_invalid_opening_preceding_whitespace)]
+pub(crate) struct FrontmatterInvalidOpeningPrecedingWhitespace {
+    #[primary_span]
+    pub span: Span,
+    #[note]
+    pub note_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_frontmatter_unclosed)]
+pub(crate) struct FrontmatterUnclosed {
+    #[primary_span]
+    pub span: Span,
+    #[note]
+    pub note_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_frontmatter_invalid_close_preceding_whitespace)]
+pub(crate) struct FrontmatterInvalidClosingPrecedingWhitespace {
+    #[primary_span]
+    pub span: Span,
+    #[note]
+    pub note_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_frontmatter_length_mismatch)]
+pub(crate) struct FrontmatterLengthMismatch {
+    #[primary_span]
+    pub span: Span,
+    #[label(parse_label_opening)]
+    pub opening: Span,
+    #[label(parse_label_close)]
+    pub close: Span,
+    pub len_opening: usize,
+    pub len_close: usize,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_leading_plus_not_supported)]
+pub(crate) struct LeadingPlusNotSupported {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[suggestion(
+        parse_suggestion_remove_plus,
+        style = "verbose",
+        code = "",
+        applicability = "machine-applicable"
+    )]
+    pub remove_plus: Option<Span>,
+    #[subdiagnostic]
+    pub add_parentheses: Option<ExprParenthesesNeeded>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_parentheses_with_struct_fields)]
+pub(crate) struct ParenthesesWithStructFields {
+    #[primary_span]
+    pub span: Span,
+    pub r#type: Path,
+    #[subdiagnostic]
+    pub braces_for_struct: BracesForStructLiteral,
+    #[subdiagnostic]
+    pub no_fields_for_fn: NoFieldsForFnCall,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion_braces_for_struct, applicability = "maybe-incorrect")]
+pub(crate) struct BracesForStructLiteral {
+    #[suggestion_part(code = " {{ ")]
+    pub first: Span,
+    #[suggestion_part(code = " }}")]
+    pub second: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion_no_fields_for_fn, applicability = "maybe-incorrect")]
+pub(crate) struct NoFieldsForFnCall {
+    #[suggestion_part(code = "")]
+    pub fields: Vec<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_labeled_loop_in_break)]
+pub(crate) struct LabeledLoopInBreak {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: WrapInParentheses,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum WrapInParentheses {
+    #[multipart_suggestion(
+        parse_sugg_wrap_expression_in_parentheses,
+        applicability = "machine-applicable"
+    )]
+    Expression {
+        #[suggestion_part(code = "(")]
+        left: Span,
+        #[suggestion_part(code = ")")]
+        right: Span,
+    },
+    #[multipart_suggestion(
+        parse_sugg_wrap_macro_in_parentheses,
+        applicability = "machine-applicable"
+    )]
+    MacroArgs {
+        #[suggestion_part(code = "(")]
+        left: Span,
+        #[suggestion_part(code = ")")]
+        right: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_array_brackets_instead_of_braces)]
+pub(crate) struct ArrayBracketsInsteadOfBraces {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: ArrayBracketsInsteadOfBracesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")]
+pub(crate) struct ArrayBracketsInsteadOfBracesSugg {
+    #[suggestion_part(code = "[")]
+    pub left: Span,
+    #[suggestion_part(code = "]")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_match_arm_body_without_braces)]
+pub(crate) struct MatchArmBodyWithoutBraces {
+    #[primary_span]
+    #[label(parse_label_statements)]
+    pub statements: Span,
+    #[label(parse_label_arrow)]
+    pub arrow: Span,
+    pub num_statements: usize,
+    #[subdiagnostic]
+    pub sub: MatchArmBodyWithoutBracesSugg,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_extra_equals)]
+#[note]
+pub(crate) struct InclusiveRangeExtraEquals {
+    #[primary_span]
+    #[suggestion(
+        parse_suggestion_remove_eq,
+        style = "verbose",
+        code = "..=",
+        applicability = "maybe-incorrect"
+    )]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_match_arrow)]
+pub(crate) struct InclusiveRangeMatchArrow {
+    #[primary_span]
+    pub arrow: Span,
+    #[label]
+    pub span: Span,
+    #[suggestion(style = "verbose", code = " ", applicability = "machine-applicable")]
+    pub after_pat: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_no_end, code = E0586)]
+#[note]
+pub(crate) struct InclusiveRangeNoEnd {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(
+        parse_suggestion_open_range,
+        code = "",
+        applicability = "machine-applicable",
+        style = "verbose"
+    )]
+    pub suggestion: Span,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum MatchArmBodyWithoutBracesSugg {
+    #[multipart_suggestion(parse_suggestion_add_braces, applicability = "machine-applicable")]
+    AddBraces {
+        #[suggestion_part(code = "{{ ")]
+        left: Span,
+        #[suggestion_part(code = " }}")]
+        right: Span,
+    },
+    #[suggestion(
+        parse_suggestion_use_comma_not_semicolon,
+        code = ",",
+        applicability = "machine-applicable",
+        style = "verbose"
+    )]
+    UseComma {
+        #[primary_span]
+        semicolon: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_struct_literal_not_allowed_here)]
+pub(crate) struct StructLiteralNotAllowedHere {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: StructLiteralNotAllowedHereSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct StructLiteralNotAllowedHereSugg {
+    #[suggestion_part(code = "(")]
+    pub left: Span,
+    #[suggestion_part(code = ")")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_literal_suffix_on_tuple_index)]
+pub(crate) struct InvalidLiteralSuffixOnTupleIndex {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub suffix: Symbol,
+    #[help(parse_tuple_exception_line_1)]
+    #[help(parse_tuple_exception_line_2)]
+    #[help(parse_tuple_exception_line_3)]
+    pub exception: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_non_string_abi_literal)]
+pub(crate) struct NonStringAbiLiteral {
+    #[primary_span]
+    #[suggestion(code = "\"C\"", applicability = "maybe-incorrect", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_mismatched_closing_delimiter)]
+pub(crate) struct MismatchedClosingDelimiter {
+    #[primary_span]
+    pub spans: Vec<Span>,
+    pub delimiter: String,
+    #[label(parse_label_unmatched)]
+    pub unmatched: Span,
+    #[label(parse_label_opening_candidate)]
+    pub opening_candidate: Option<Span>,
+    #[label(parse_label_unclosed)]
+    pub unclosed: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_visibility_restriction, code = E0704)]
+#[help]
+pub(crate) struct IncorrectVisibilityRestriction {
+    #[primary_span]
+    #[suggestion(code = "in {inner_str}", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+    pub inner_str: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_assignment_else_not_allowed)]
+pub(crate) struct AssignmentElseNotAllowed {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_statement_after_outer_attr)]
+pub(crate) struct ExpectedStatementAfterOuterAttr {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_doc_comment_does_not_document_anything, code = E0585)]
+#[help]
+pub(crate) struct DocCommentDoesNotDocumentAnything {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = ",", applicability = "machine-applicable", style = "verbose")]
+    pub missing_comma: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_const_let_mutually_exclusive)]
+pub(crate) struct ConstLetMutuallyExclusive {
+    #[primary_span]
+    #[suggestion(code = "const", applicability = "maybe-incorrect", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_expression_in_let_else)]
+pub(crate) struct InvalidExpressionInLetElse {
+    #[primary_span]
+    pub span: Span,
+    pub operator: &'static str,
+    #[subdiagnostic]
+    pub sugg: WrapInParentheses,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_curly_in_let_else)]
+pub(crate) struct InvalidCurlyInLetElse {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: WrapInParentheses,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_compound_assignment_expression_in_let)]
+#[help]
+pub(crate) struct CompoundAssignmentExpressionInLet {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(style = "verbose", code = "", applicability = "maybe-incorrect")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_suffixed_literal_in_attribute)]
+#[help]
+pub(crate) struct SuffixedLiteralInAttribute {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_meta_item)]
+pub(crate) struct InvalidMetaItem {
+    #[primary_span]
+    pub span: Span,
+    pub descr: String,
+    #[subdiagnostic]
+    pub quote_ident_sugg: Option<InvalidMetaItemQuoteIdentSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_quote_ident_sugg, applicability = "machine-applicable")]
+pub(crate) struct InvalidMetaItemQuoteIdentSugg {
+    #[suggestion_part(code = "\"")]
+    pub before: Span,
+    #[suggestion_part(code = "\"")]
+    pub after: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+    parse_sugg_escape_identifier,
+    style = "verbose",
+    applicability = "maybe-incorrect",
+    code = "r#"
+)]
+pub(crate) struct SuggEscapeIdentifier {
+    #[primary_span]
+    pub span: Span,
+    pub ident_name: String,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+    parse_sugg_remove_comma,
+    applicability = "machine-applicable",
+    code = "",
+    style = "verbose"
+)]
+pub(crate) struct SuggRemoveComma {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+    parse_sugg_add_let_for_stmt,
+    style = "verbose",
+    applicability = "maybe-incorrect",
+    code = "let "
+)]
+pub(crate) struct SuggAddMissingLetStmt {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ExpectedIdentifierFound {
+    #[label(parse_expected_identifier_found_reserved_identifier)]
+    ReservedIdentifier(#[primary_span] Span),
+    #[label(parse_expected_identifier_found_keyword)]
+    Keyword(#[primary_span] Span),
+    #[label(parse_expected_identifier_found_reserved_keyword)]
+    ReservedKeyword(#[primary_span] Span),
+    #[label(parse_expected_identifier_found_doc_comment)]
+    DocComment(#[primary_span] Span),
+    #[label(parse_expected_identifier_found_metavar)]
+    MetaVar(#[primary_span] Span),
+    #[label(parse_expected_identifier)]
+    Other(#[primary_span] Span),
+}
+
+impl ExpectedIdentifierFound {
+    pub(crate) fn new(token_descr: Option<TokenDescription>, span: Span) -> Self {
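+        // Each variant is a tuple constructor over a single `Span`, so the
+        // `match` selects the right constructor and the trailing `(span)`
+        // call applies it.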
+        (match token_descr {
+            Some(TokenDescription::ReservedIdentifier) => {
+                ExpectedIdentifierFound::ReservedIdentifier
+            }
+            Some(TokenDescription::Keyword) => ExpectedIdentifierFound::Keyword,
+            Some(TokenDescription::ReservedKeyword) => ExpectedIdentifierFound::ReservedKeyword,
+            Some(TokenDescription::DocComment) => ExpectedIdentifierFound::DocComment,
+            Some(TokenDescription::MetaVar(_)) => ExpectedIdentifierFound::MetaVar,
+            None => ExpectedIdentifierFound::Other,
+        })(span)
+    }
+}
+
+pub(crate) struct ExpectedIdentifier {
+    pub span: Span,
+    pub token: Token,
+    pub suggest_raw: Option<SuggEscapeIdentifier>,
+    pub suggest_remove_comma: Option<SuggRemoveComma>,
+    pub help_cannot_start_number: Option<HelpIdentifierStartsWithNumber>,
+}
+
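+// Hand-written `Diagnostic` impl (rather than derived): the primary message
+// depends on what kind of token was found, so it is chosen at runtime.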
+impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedIdentifier {
+    #[track_caller]
+    fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> {
+        let token_descr = TokenDescription::from_token(&self.token);
+
+        let mut add_token = true;
+        let mut diag = Diag::new(
+            dcx,
+            level,
+            match token_descr {
+                Some(TokenDescription::ReservedIdentifier) => {
+                    fluent::parse_expected_identifier_found_reserved_identifier_str
+                }
+                Some(TokenDescription::Keyword) => {
+                    fluent::parse_expected_identifier_found_keyword_str
+                }
+                Some(TokenDescription::ReservedKeyword) => {
+                    fluent::parse_expected_identifier_found_reserved_keyword_str
+                }
+                Some(TokenDescription::DocComment) => {
+                    fluent::parse_expected_identifier_found_doc_comment_str
+                }
+                Some(TokenDescription::MetaVar(_)) => {
+                    add_token = false;
+                    fluent::parse_expected_identifier_found_metavar_str
+                }
+                None => fluent::parse_expected_identifier_found_str,
+            },
+        );
+        diag.span(self.span);
+        if add_token {
+            diag.arg("token", self.token);
+        }
+
+        if let Some(sugg) = self.suggest_raw {
+            sugg.add_to_diag(&mut diag);
+        }
+
+        ExpectedIdentifierFound::new(token_descr, self.span).add_to_diag(&mut diag);
+
+        if let Some(sugg) = self.suggest_remove_comma {
+            sugg.add_to_diag(&mut diag);
+        }
+
+        if let Some(help) = self.help_cannot_start_number {
+            help.add_to_diag(&mut diag);
+        }
+
+        diag
+    }
+}
+
+#[derive(Subdiagnostic)]
+#[help(parse_invalid_identifier_with_leading_number)]
+pub(crate) struct HelpIdentifierStartsWithNumber {
+    #[primary_span]
+    pub num_span: Span,
+}
+
+pub(crate) struct ExpectedSemi {
+    pub span: Span,
+    pub token: Token,
+
+    pub unexpected_token_label: Option<Span>,
+    pub sugg: ExpectedSemiSugg,
+}
+
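+// Same pattern as `ExpectedIdentifier`: the message is selected at runtime
+// from the description of the unexpected token.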
+impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedSemi {
+    #[track_caller]
+    fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> {
+        let token_descr = TokenDescription::from_token(&self.token);
+
+        let mut add_token = true;
+        let mut diag = Diag::new(
+            dcx,
+            level,
+            match token_descr {
+                Some(TokenDescription::ReservedIdentifier) => {
+                    fluent::parse_expected_semi_found_reserved_identifier_str
+                }
+                Some(TokenDescription::Keyword) => fluent::parse_expected_semi_found_keyword_str,
+                Some(TokenDescription::ReservedKeyword) => {
+                    fluent::parse_expected_semi_found_reserved_keyword_str
+                }
+                Some(TokenDescription::DocComment) => {
+                    fluent::parse_expected_semi_found_doc_comment_str
+                }
+                Some(TokenDescription::MetaVar(_)) => {
+                    add_token = false;
+                    fluent::parse_expected_semi_found_metavar_str
+                }
+                None => fluent::parse_expected_semi_found_str,
+            },
+        );
+        diag.span(self.span);
+        if add_token {
+            diag.arg("token", self.token);
+        }
+
+        if let Some(unexpected_token_label) = self.unexpected_token_label {
+            diag.span_label(unexpected_token_label, fluent::parse_label_unexpected_token);
+        }
+
+        self.sugg.add_to_diag(&mut diag);
+
+        diag
+    }
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ExpectedSemiSugg {
+    #[suggestion(
+        parse_sugg_change_this_to_semi,
+        code = ";",
+        applicability = "machine-applicable",
+        style = "short"
+    )]
+    ChangeToSemi(#[primary_span] Span),
+    #[suggestion(
+        parse_sugg_add_semi,
+        code = ";",
+        applicability = "machine-applicable",
+        style = "short"
+    )]
+    AddSemi(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_struct_literal_body_without_path)]
+pub(crate) struct StructLiteralBodyWithoutPath {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: StructLiteralBodyWithoutPathSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "has-placeholders")]
+pub(crate) struct StructLiteralBodyWithoutPathSugg {
+    #[suggestion_part(code = "{{ SomeStruct ")]
+    pub before: Span,
+    #[suggestion_part(code = " }}")]
+    pub after: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unmatched_angle_brackets)]
+pub(crate) struct UnmatchedAngleBrackets {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+    pub num_extra_brackets: usize,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_generic_parameters_without_angle_brackets)]
+pub(crate) struct GenericParamsWithoutAngleBrackets {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: GenericParamsWithoutAngleBracketsSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct GenericParamsWithoutAngleBracketsSugg {
+    #[suggestion_part(code = "<")]
+    pub left: Span,
+    #[suggestion_part(code = ">")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_comparison_operators_cannot_be_chained)]
+pub(crate) struct ComparisonOperatorsCannotBeChained {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[suggestion(
+        parse_sugg_turbofish_syntax,
+        style = "verbose",
+        code = "::",
+        applicability = "maybe-incorrect"
+    )]
+    pub suggest_turbofish: Option<Span>,
+    #[help(parse_sugg_turbofish_syntax)]
+    #[help(parse_sugg_parentheses_for_function_args)]
+    pub help_turbofish: bool,
+    #[subdiagnostic]
+    pub chaining_sugg: Option<ComparisonOperatorsCannotBeChainedSugg>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
+    #[suggestion(
+        parse_sugg_split_comparison,
+        style = "verbose",
+        code = " && {middle_term}",
+        applicability = "maybe-incorrect"
+    )]
+    SplitComparison {
+        #[primary_span]
+        span: Span,
+        middle_term: String,
+    },
+    #[multipart_suggestion(parse_sugg_parenthesize, applicability = "maybe-incorrect")]
+    Parenthesize {
+        #[suggestion_part(code = "(")]
+        left: Span,
+        #[suggestion_part(code = ")")]
+        right: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_question_mark_in_type)]
+pub(crate) struct QuestionMarkInType {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: QuestionMarkInTypeSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct QuestionMarkInTypeSugg {
+    #[suggestion_part(code = "Option<")]
+    pub left: Span,
+    #[suggestion_part(code = ">")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_parentheses_in_for_head)]
+pub(crate) struct ParenthesesInForHead {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: ParenthesesInForHeadSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ParenthesesInForHeadSugg {
+    #[suggestion_part(code = " ")]
+    pub left: Span,
+    #[suggestion_part(code = " ")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_parentheses_in_match_arm_pattern)]
+pub(crate) struct ParenthesesInMatchPat {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: ParenthesesInMatchPatSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ParenthesesInMatchPatSugg {
+    #[suggestion_part(code = "")]
+    pub left: Span,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_doc_comment_on_param_type)]
+pub(crate) struct DocCommentOnParamType {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_attribute_on_param_type)]
+pub(crate) struct AttributeOnParamType {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_attribute_on_type)]
+pub(crate) struct AttributeOnType {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "machine-applicable", style = "tool-only")]
+    pub fix_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_attribute_on_generic_arg)]
+pub(crate) struct AttributeOnGenericArg {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "machine-applicable", style = "tool-only")]
+    pub fix_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_attribute_on_empty_type)]
+pub(crate) struct AttributeOnEmptyType {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_pattern_method_param_without_body, code = E0642)]
+pub(crate) struct PatternMethodParamWithoutBody {
+    #[primary_span]
+    #[suggestion(code = "_", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_self_param_not_first)]
+pub(crate) struct SelfParamNotFirst {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_const_generic_without_braces)]
+pub(crate) struct ConstGenericWithoutBraces {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: ConstGenericWithoutBracesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ConstGenericWithoutBracesSugg {
+    #[suggestion_part(code = "{{ ")]
+    pub left: Span,
+    #[suggestion_part(code = " }}")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_const_param_declaration)]
+pub(crate) struct UnexpectedConstParamDeclaration {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: Option<UnexpectedConstParamDeclarationSugg>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum UnexpectedConstParamDeclarationSugg {
+    #[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+    AddParam {
+        #[suggestion_part(code = "<{snippet}>")]
+        impl_generics: Span,
+        #[suggestion_part(code = "{ident}")]
+        incorrect_decl: Span,
+        snippet: String,
+        ident: String,
+    },
+    #[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+    AppendParam {
+        #[suggestion_part(code = ", {snippet}")]
+        impl_generics_end: Span,
+        #[suggestion_part(code = "{ident}")]
+        incorrect_decl: Span,
+        snippet: String,
+        ident: String,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_const_in_generic_param)]
+pub(crate) struct UnexpectedConstInGenericParam {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(style = "verbose", code = "", applicability = "maybe-incorrect")]
+    pub to_remove: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_move_order_incorrect)]
+pub(crate) struct AsyncMoveOrderIncorrect {
+    #[primary_span]
+    #[suggestion(style = "verbose", code = "async move", applicability = "maybe-incorrect")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_use_order_incorrect)]
+pub(crate) struct AsyncUseOrderIncorrect {
+    #[primary_span]
+    #[suggestion(style = "verbose", code = "async use", applicability = "maybe-incorrect")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_double_colon_in_bound)]
+pub(crate) struct DoubleColonInBound {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = ": ", applicability = "machine-applicable", style = "verbose")]
+    pub between: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_ptr_with_generics)]
+pub(crate) struct FnPtrWithGenerics {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: Option<FnPtrWithGenericsSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_misplaced_return_type,
+    style = "verbose",
+    applicability = "maybe-incorrect"
+)]
+pub(crate) struct MisplacedReturnType {
+    #[suggestion_part(code = " {snippet}")]
+    pub fn_params_end: Span,
+    pub snippet: String,
+    #[suggestion_part(code = "")]
+    pub ret_ty_span: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")]
+pub(crate) struct FnPtrWithGenericsSugg {
+    #[suggestion_part(code = "{snippet}")]
+    pub left: Span,
+    pub snippet: String,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+    pub arity: usize,
+    pub for_param_list_exists: bool,
+}
+
+pub(crate) struct FnTraitMissingParen {
+    pub span: Span,
+}
+
+impl Subdiagnostic for FnTraitMissingParen {
+    fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
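+        // Label the bare `Fn`-family trait bound and suggest appending `()`
+        // directly after it (e.g. `Fn` -> `Fn()`).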
+        diag.span_label(self.span, crate::fluent_generated::parse_fn_trait_missing_paren);
+        diag.span_suggestion_short(
+            self.span.shrink_to_hi(),
+            crate::fluent_generated::parse_add_paren,
+            "()",
+            Applicability::MachineApplicable,
+        );
+    }
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_if_with_if)]
+pub(crate) struct UnexpectedIfWithIf(
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = " ", style = "verbose")]
+    pub Span,
+);
+
+#[derive(Diagnostic)]
+#[diag(parse_maybe_fn_typo_with_impl)]
+pub(crate) struct FnTypoWithImpl {
+    #[primary_span]
+    #[suggestion(applicability = "maybe-incorrect", code = "impl", style = "verbose")]
+    pub fn_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_fn_path_found_fn_keyword)]
+pub(crate) struct ExpectedFnPathFoundFnKeyword {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "Fn", style = "verbose")]
+    pub fn_token_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_path_found_named_params)]
+pub(crate) struct FnPathFoundNamedParams {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "")]
+    pub named_param_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_path_found_c_variadic_params)]
+pub(crate) struct PathFoundCVariadicParams {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_path_found_attribute_in_params)]
+pub(crate) struct PathFoundAttributeInParams {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_path_double_colon)]
+pub(crate) struct PathSingleColon {
+    #[primary_span]
+    pub span: Span,
+
+    #[suggestion(applicability = "machine-applicable", code = ":", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_path_double_colon)]
+pub(crate) struct PathTripleColon {
+    #[primary_span]
+    #[suggestion(applicability = "maybe-incorrect", code = "", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_colon_as_semi)]
+pub(crate) struct ColonAsSemi {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = ";", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_where_clause_before_tuple_struct_body)]
+pub(crate) struct WhereClauseBeforeTupleStructBody {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[label(parse_name_label)]
+    pub name: Span,
+    #[label(parse_body_label)]
+    pub body: Span,
+    #[subdiagnostic]
+    pub sugg: Option<WhereClauseBeforeTupleStructBodySugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WhereClauseBeforeTupleStructBodySugg {
+    #[suggestion_part(code = "{snippet}")]
+    pub left: Span,
+    pub snippet: String,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_fn_in_2015, code = E0670)]
+pub(crate) struct AsyncFnIn2015 {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub help: HelpUseLatestEdition,
+}
+
+#[derive(Subdiagnostic)]
+#[label(parse_async_block_in_2015)]
+pub(crate) struct AsyncBlockIn2015 {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_move_block_in_2015)]
+pub(crate) struct AsyncMoveBlockIn2015 {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_use_block_in_2015)]
+pub(crate) struct AsyncUseBlockIn2015 {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_bound_modifier_in_2015)]
+pub(crate) struct AsyncBoundModifierIn2015 {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub help: HelpUseLatestEdition,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_let_chain_pre_2024)]
+pub(crate) struct LetChainPre2024 {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_self_argument_pointer)]
+pub(crate) struct SelfArgumentPointer {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_token_after_dot)]
+pub(crate) struct UnexpectedTokenAfterDot {
+    #[primary_span]
+    pub span: Span,
+    pub actual: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_visibility_not_followed_by_item)]
+#[help]
+pub(crate) struct VisibilityNotFollowedByItem {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub vis: Visibility,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_default_not_followed_by_item)]
+#[note]
+pub(crate) struct DefaultNotFollowedByItem {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum MissingKeywordForItemDefinition {
+    #[diag(parse_missing_enum_for_enum_definition)]
+    Enum {
+        #[primary_span]
+        span: Span,
+        #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = "enum ")]
+        insert_span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_missing_enum_or_struct_for_item_definition)]
+    EnumOrStruct {
+        #[primary_span]
+        span: Span,
+    },
+    #[diag(parse_missing_struct_for_struct_definition)]
+    Struct {
+        #[primary_span]
+        span: Span,
+        #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = "struct ")]
+        insert_span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_missing_fn_for_function_definition)]
+    Function {
+        #[primary_span]
+        span: Span,
+        #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = "fn ")]
+        insert_span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_missing_fn_for_method_definition)]
+    Method {
+        #[primary_span]
+        span: Span,
+        #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = "fn ")]
+        insert_span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_missing_fn_or_struct_for_item_definition)]
+    Ambiguous {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        subdiag: Option<AmbiguousMissingKwForItemSub>,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum AmbiguousMissingKwForItemSub {
+    #[suggestion(
+        parse_suggestion,
+        applicability = "maybe-incorrect",
+        code = "{snippet}!",
+        style = "verbose"
+    )]
+    SuggestMacro {
+        #[primary_span]
+        span: Span,
+        snippet: String,
+    },
+    #[help(parse_help)]
+    HelpMacro,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_fn_params)]
+pub(crate) struct MissingFnParams {
+    #[primary_span]
+    #[suggestion(code = "()", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_path_sep_in_fn_definition)]
+pub(crate) struct InvalidPathSepInFnDefinition {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_trait_in_trait_impl)]
+pub(crate) struct MissingTraitInTraitImpl {
+    #[primary_span]
+    #[suggestion(
+        parse_suggestion_add_trait,
+        code = " Trait ",
+        applicability = "has-placeholders",
+        style = "verbose"
+    )]
+    pub span: Span,
+    #[suggestion(
+        parse_suggestion_remove_for,
+        code = "",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    pub for_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_for_in_trait_impl)]
+pub(crate) struct MissingForInTraitImpl {
+    #[primary_span]
+    #[suggestion(style = "verbose", code = " for ", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_trait_in_trait_impl_found_type)]
+pub(crate) struct ExpectedTraitInTraitImplFoundType {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extra_impl_keyword_in_trait_impl)]
+pub(crate) struct ExtraImplKeywordInTraitImpl {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "maybe-incorrect", style = "short")]
+    pub extra_impl_kw: Span,
+    #[note]
+    pub impl_trait_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bounds_not_allowed_on_trait_aliases)]
+pub(crate) struct BoundsNotAllowedOnTraitAliases {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trait_alias_cannot_be_auto)]
+pub(crate) struct TraitAliasCannotBeAuto {
+    #[primary_span]
+    #[label(parse_trait_alias_cannot_be_auto)]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trait_alias_cannot_be_const)]
+pub(crate) struct TraitAliasCannotBeConst {
+    #[primary_span]
+    #[label(parse_trait_alias_cannot_be_const)]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trait_alias_cannot_be_unsafe)]
+pub(crate) struct TraitAliasCannotBeUnsafe {
+    #[primary_span]
+    #[label(parse_trait_alias_cannot_be_unsafe)]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_associated_static_item_not_allowed)]
+pub(crate) struct AssociatedStaticItemNotAllowed {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extern_crate_name_with_dashes)]
+pub(crate) struct ExternCrateNameWithDashes {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: ExternCrateNameWithDashesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ExternCrateNameWithDashesSugg {
+    #[suggestion_part(code = "_")]
+    pub dashes: Vec<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extern_item_cannot_be_const)]
+#[note]
+pub(crate) struct ExternItemCannotBeConst {
+    #[primary_span]
+    pub ident_span: Span,
+    #[suggestion(code = "static ", applicability = "machine-applicable", style = "verbose")]
+    pub const_span: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_const_global_cannot_be_mutable)]
+pub(crate) struct ConstGlobalCannotBeMutable {
+    #[primary_span]
+    #[label]
+    pub ident_span: Span,
+    #[suggestion(code = "static", style = "verbose", applicability = "maybe-incorrect")]
+    pub const_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_const_type)]
+pub(crate) struct MissingConstType {
+    #[primary_span]
+    #[suggestion(code = "{colon} <type>", style = "verbose", applicability = "has-placeholders")]
+    pub span: Span,
+
+    pub kind: &'static str,
+    pub colon: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_enum_struct_mutually_exclusive)]
+pub(crate) struct EnumStructMutuallyExclusive {
+    #[primary_span]
+    #[suggestion(code = "enum", style = "verbose", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum UnexpectedTokenAfterStructName {
+    #[diag(parse_unexpected_token_after_struct_name_found_reserved_identifier)]
+    ReservedIdentifier {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_keyword)]
+    Keyword {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_reserved_keyword)]
+    ReservedKeyword {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_doc_comment)]
+    DocComment {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_metavar)]
+    MetaVar {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_other)]
+    Other {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+}
+
+impl UnexpectedTokenAfterStructName {
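+    /// Chooses the variant that best describes the unexpected token
+    /// (reserved identifier, keyword, reserved keyword, doc comment,
+    /// metavariable, or other).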
+    pub(crate) fn new(span: Span, token: Token) -> Self {
+        match TokenDescription::from_token(&token) {
+            Some(TokenDescription::ReservedIdentifier) => Self::ReservedIdentifier { span, token },
+            Some(TokenDescription::Keyword) => Self::Keyword { span, token },
+            Some(TokenDescription::ReservedKeyword) => Self::ReservedKeyword { span, token },
+            Some(TokenDescription::DocComment) => Self::DocComment { span, token },
+            Some(TokenDescription::MetaVar(_)) => Self::MetaVar { span },
+            None => Self::Other { span, token },
+        }
+    }
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_self_in_generic_parameters)]
+#[note]
+pub(crate) struct UnexpectedSelfInGenericParameters {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_default_value_for_lifetime_in_generic_parameters)]
+pub(crate) struct UnexpectedDefaultValueForLifetimeInGenericParameters {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_multiple_where_clauses)]
+pub(crate) struct MultipleWhereClauses {
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub previous: Span,
+    #[suggestion(style = "verbose", code = ",", applicability = "maybe-incorrect")]
+    pub between: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum UnexpectedNonterminal {
+    #[diag(parse_nonterminal_expected_item_keyword)]
+    Item(#[primary_span] Span),
+    #[diag(parse_nonterminal_expected_statement)]
+    Statement(#[primary_span] Span),
+    #[diag(parse_nonterminal_expected_ident)]
+    Ident {
+        #[primary_span]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_nonterminal_expected_lifetime)]
+    Lifetime {
+        #[primary_span]
+        span: Span,
+        token: Token,
+    },
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum TopLevelOrPatternNotAllowed {
+    #[diag(parse_or_pattern_not_allowed_in_let_binding)]
+    LetBinding {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        sub: Option<TopLevelOrPatternNotAllowedSugg>,
+    },
+    #[diag(parse_or_pattern_not_allowed_in_fn_parameters)]
+    FunctionParameter {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        sub: Option<TopLevelOrPatternNotAllowedSugg>,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cannot_be_raw_ident)]
+pub(crate) struct CannotBeRawIdent {
+    #[primary_span]
+    pub span: Span,
+    pub ident: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cannot_be_raw_lifetime)]
+pub(crate) struct CannotBeRawLifetime {
+    #[primary_span]
+    pub span: Span,
+    pub ident: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_keyword_lifetime)]
+pub(crate) struct KeywordLifetime {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_label)]
+pub(crate) struct InvalidLabel {
+    #[primary_span]
+    pub span: Span,
+    pub name: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cr_doc_comment)]
+pub(crate) struct CrDocComment {
+    #[primary_span]
+    pub span: Span,
+    pub block: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_no_digits_literal, code = E0768)]
+pub(crate) struct NoDigitsLiteral {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_digit_literal)]
+pub(crate) struct InvalidDigitLiteral {
+    #[primary_span]
+    pub span: Span,
+    pub base: u32,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_empty_exponent_float)]
+pub(crate) struct EmptyExponentFloat {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_float_literal_unsupported_base)]
+pub(crate) struct FloatLiteralUnsupportedBase {
+    #[primary_span]
+    pub span: Span,
+    pub base: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_prefix)]
+#[note]
+pub(crate) struct UnknownPrefix<'a> {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub prefix: &'a str,
+    #[subdiagnostic]
+    pub sugg: Option<UnknownPrefixSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[note(parse_macro_expands_to_adt_field)]
+pub(crate) struct MacroExpandsToAdtField<'a> {
+    pub adt_ty: &'a str,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum UnknownPrefixSugg {
+    #[suggestion(
+        parse_suggestion_br,
+        code = "br",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    UseBr(#[primary_span] Span),
+    #[suggestion(
+        parse_suggestion_cr,
+        code = "cr",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    UseCr(#[primary_span] Span),
+    #[suggestion(
+        parse_suggestion_whitespace,
+        code = " ",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    Whitespace(#[primary_span] Span),
+    #[multipart_suggestion(
+        parse_suggestion_str,
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    MeantStr {
+        #[suggestion_part(code = "\"")]
+        start: Span,
+        #[suggestion_part(code = "\"")]
+        end: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_reserved_multihash)]
+#[note]
+pub(crate) struct ReservedMultihash {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: Option<GuardedStringSugg>,
+}
+#[derive(Diagnostic)]
+#[diag(parse_reserved_string)]
+#[note]
+pub(crate) struct ReservedString {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: Option<GuardedStringSugg>,
+}
+#[derive(Subdiagnostic)]
+#[suggestion(
+    parse_suggestion_whitespace,
+    code = " ",
+    applicability = "maybe-incorrect",
+    style = "verbose"
+)]
+pub(crate) struct GuardedStringSugg(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_too_many_hashes)]
+pub(crate) struct TooManyHashes {
+    #[primary_span]
+    pub span: Span,
+    pub num: u32,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_start_of_token)]
+pub(crate) struct UnknownTokenStart {
+    #[primary_span]
+    pub span: Span,
+    pub escaped: String,
+    #[subdiagnostic]
+    pub sugg: Option<TokenSubstitution>,
+    #[subdiagnostic]
+    pub null: Option<UnknownTokenNull>,
+    #[subdiagnostic]
+    pub repeat: Option<UnknownTokenRepeat>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum TokenSubstitution {
+    #[suggestion(
+        parse_sugg_quotes,
+        code = "{suggestion}",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    DirectedQuotes {
+        #[primary_span]
+        span: Span,
+        suggestion: String,
+        ascii_str: &'static str,
+        ascii_name: &'static str,
+    },
+    #[suggestion(
+        parse_sugg_other,
+        code = "{suggestion}",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    Other {
+        #[primary_span]
+        span: Span,
+        suggestion: String,
+        ch: String,
+        u_name: &'static str,
+        ascii_str: &'static str,
+        ascii_name: &'static str,
+    },
+}
+
+#[derive(Subdiagnostic)]
+#[note(parse_note_repeats)]
+pub(crate) struct UnknownTokenRepeat {
+    pub repeats: usize,
+}
+
+#[derive(Subdiagnostic)]
+#[help(parse_help_null)]
+pub(crate) struct UnknownTokenNull;
+
+#[derive(Diagnostic)]
+pub(crate) enum UnescapeError {
+    #[diag(parse_invalid_unicode_escape)]
+    #[help]
+    InvalidUnicodeEscape {
+        #[primary_span]
+        #[label]
+        span: Span,
+        surrogate: bool,
+    },
+    #[diag(parse_escape_only_char)]
+    EscapeOnlyChar {
+        #[primary_span]
+        span: Span,
+        #[suggestion(
+            parse_escape,
+            applicability = "machine-applicable",
+            code = "{escaped_sugg}",
+            style = "verbose"
+        )]
+        char_span: Span,
+        escaped_sugg: String,
+        escaped_msg: String,
+        byte: bool,
+    },
+    #[diag(parse_bare_cr)]
+    BareCr {
+        #[primary_span]
+        #[suggestion(
+            parse_escape,
+            applicability = "machine-applicable",
+            code = "\\r",
+            style = "verbose"
+        )]
+        span: Span,
+        double_quotes: bool,
+    },
+    #[diag(parse_bare_cr_in_raw_string)]
+    BareCrRawString(#[primary_span] Span),
+    #[diag(parse_too_short_hex_escape)]
+    TooShortHexEscape(#[primary_span] Span),
+    #[diag(parse_invalid_char_in_escape)]
+    InvalidCharInEscape {
+        #[primary_span]
+        #[label]
+        span: Span,
+        is_hex: bool,
+        ch: String,
+    },
+    #[diag(parse_out_of_range_hex_escape)]
+    OutOfRangeHexEscape(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_leading_underscore_unicode_escape)]
+    LeadingUnderscoreUnicodeEscape {
+        #[primary_span]
+        #[label(parse_leading_underscore_unicode_escape_label)]
+        span: Span,
+        ch: String,
+    },
+    #[diag(parse_overlong_unicode_escape)]
+    OverlongUnicodeEscape(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_unclosed_unicode_escape)]
+    UnclosedUnicodeEscape(
+        #[primary_span]
+        #[label]
+        Span,
+        #[suggestion(
+            parse_terminate,
+            code = "}}",
+            applicability = "maybe-incorrect",
+            style = "verbose"
+        )]
+        Span,
+    ),
+    #[diag(parse_no_brace_unicode_escape)]
+    NoBraceInUnicodeEscape {
+        #[primary_span]
+        span: Span,
+        #[label]
+        label: Option<Span>,
+        #[subdiagnostic]
+        sub: NoBraceUnicodeSub,
+    },
+    #[diag(parse_unicode_escape_in_byte)]
+    #[help]
+    UnicodeEscapeInByte(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_empty_unicode_escape)]
+    EmptyUnicodeEscape(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_zero_chars)]
+    ZeroChars(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_lone_slash)]
+    LoneSlash(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_unskipped_whitespace)]
+    UnskippedWhitespace {
+        #[primary_span]
+        span: Span,
+        #[label]
+        char_span: Span,
+        ch: String,
+    },
+    #[diag(parse_multiple_skipped_lines)]
+    MultipleSkippedLinesWarning(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_more_than_one_char)]
+    MoreThanOneChar {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        note: Option<MoreThanOneCharNote>,
+        #[subdiagnostic]
+        suggestion: MoreThanOneCharSugg,
+    },
+    #[diag(parse_nul_in_c_str)]
+    NulInCStr {
+        #[primary_span]
+        span: Span,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum MoreThanOneCharSugg {
+    #[suggestion(
+        parse_consider_normalized,
+        code = "{normalized}",
+        applicability = "machine-applicable",
+        style = "verbose"
+    )]
+    NormalizedForm {
+        #[primary_span]
+        span: Span,
+        ch: String,
+        normalized: String,
+    },
+    #[suggestion(
+        parse_remove_non,
+        code = "{ch}",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    RemoveNonPrinting {
+        #[primary_span]
+        span: Span,
+        ch: String,
+    },
+    #[suggestion(
+        parse_use_double_quotes,
+        code = "{sugg}",
+        applicability = "machine-applicable",
+        style = "verbose"
+    )]
+    QuotesFull {
+        #[primary_span]
+        span: Span,
+        is_byte: bool,
+        sugg: String,
+    },
+    #[multipart_suggestion(parse_use_double_quotes, applicability = "machine-applicable")]
+    Quotes {
+        #[suggestion_part(code = "{prefix}\"")]
+        start: Span,
+        #[suggestion_part(code = "\"")]
+        end: Span,
+        is_byte: bool,
+        prefix: &'static str,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum MoreThanOneCharNote {
+    #[note(parse_followed_by)]
+    AllCombining {
+        #[primary_span]
+        span: Span,
+        chr: String,
+        len: usize,
+        escaped_marks: String,
+    },
+    #[note(parse_non_printing)]
+    NonPrinting {
+        #[primary_span]
+        span: Span,
+        escaped: String,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum NoBraceUnicodeSub {
+    #[suggestion(
+        parse_use_braces,
+        code = "{suggestion}",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    Suggestion {
+        #[primary_span]
+        span: Span,
+        suggestion: String,
+    },
+    #[help(parse_format_of_unicode)]
+    Help,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_sugg_wrap_pattern_in_parens, applicability = "machine-applicable")]
+pub(crate) struct WrapInParens {
+    #[suggestion_part(code = "(")]
+    pub(crate) lo: Span,
+    #[suggestion_part(code = ")")]
+    pub(crate) hi: Span,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum TopLevelOrPatternNotAllowedSugg {
+    #[suggestion(
+        parse_sugg_remove_leading_vert_in_pattern,
+        code = "",
+        applicability = "machine-applicable",
+        style = "verbose"
+    )]
+    RemoveLeadingVert {
+        #[primary_span]
+        span: Span,
+    },
+    WrapInParens {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        suggestion: WrapInParens,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_vert_vert_before_function_parameter)]
+#[note(parse_note_pattern_alternatives_use_single_vert)]
+pub(crate) struct UnexpectedVertVertBeforeFunctionParam {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_vert_vert_in_pattern)]
+pub(crate) struct UnexpectedVertVertInPattern {
+    #[primary_span]
+    #[suggestion(code = "|", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+    #[label(parse_label_while_parsing_or_pattern_here)]
+    pub start: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trailing_vert_not_allowed)]
+pub(crate) struct TrailingVertNotAllowed {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+    #[label(parse_label_while_parsing_or_pattern_here)]
+    pub start: Option<Span>,
+    pub token: Token,
+    #[note(parse_note_pattern_alternatives_use_single_vert)]
+    pub note_double_vert: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dotdotdot_rest_pattern)]
+pub(crate) struct DotDotDotRestPattern {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[suggestion(style = "verbose", code = "", applicability = "machine-applicable")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_pattern_on_wrong_side_of_at)]
+pub(crate) struct PatternOnWrongSideOfAt {
+    #[primary_span]
+    #[suggestion(code = "{whole_pat}", applicability = "machine-applicable", style = "verbose")]
+    pub whole_span: Span,
+    pub whole_pat: String,
+    #[label(parse_label_pattern)]
+    pub pattern: Span,
+    #[label(parse_label_binding)]
+    pub binding: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_binding_left_of_at)]
+#[note]
+pub(crate) struct ExpectedBindingLeftOfAt {
+    #[primary_span]
+    pub whole_span: Span,
+    #[label(parse_label_lhs)]
+    pub lhs: Span,
+    #[label(parse_label_rhs)]
+    pub rhs: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_ambiguous_range_pattern_suggestion,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct ParenRangeSuggestion {
+    #[suggestion_part(code = "(")]
+    pub lo: Span,
+    #[suggestion_part(code = ")")]
+    pub hi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_ambiguous_range_pattern)]
+pub(crate) struct AmbiguousRangePattern {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub suggestion: ParenRangeSuggestion,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_lifetime_in_pattern)]
+pub(crate) struct UnexpectedLifetimeInPattern {
+    #[primary_span]
+    pub span: Span,
+    pub symbol: Symbol,
+    #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum InvalidMutInPattern {
+    #[diag(parse_mut_on_nested_ident_pattern)]
+    #[note(parse_note_mut_pattern_usage)]
+    NestedIdent {
+        #[primary_span]
+        #[suggestion(code = "{pat}", applicability = "machine-applicable", style = "verbose")]
+        span: Span,
+        pat: String,
+    },
+    #[diag(parse_mut_on_non_ident_pattern)]
+    #[note(parse_note_mut_pattern_usage)]
+    NonIdent {
+        #[primary_span]
+        #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+        span: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_repeated_mut_in_pattern)]
+pub(crate) struct RepeatedMutInPattern {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_dot_range_to_pattern_not_allowed)]
+pub(crate) struct DotDotDotRangeToPatternNotAllowed {
+    #[primary_span]
+    #[suggestion(style = "verbose", code = "..=", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_enum_pattern_instead_of_identifier)]
+pub(crate) struct EnumPatternInsteadOfIdentifier {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_at_dot_dot_in_struct_pattern)]
+pub(crate) struct AtDotDotInStructPattern {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", style = "verbose", applicability = "machine-applicable")]
+    pub remove: Span,
+    pub ident: Ident,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_at_in_struct_pattern)]
+#[note]
+#[help]
+pub(crate) struct AtInStructPattern {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_dot_for_remaining_fields)]
+pub(crate) struct DotDotDotForRemainingFields {
+    #[primary_span]
+    #[suggestion(code = "..", style = "verbose", applicability = "machine-applicable")]
+    pub span: Span,
+    pub token_str: Cow<'static, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_comma_after_pattern_field)]
+pub(crate) struct ExpectedCommaAfterPatternField {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_expr_in_pat)]
+#[note]
+pub(crate) struct UnexpectedExpressionInPattern {
+    /// The unexpected expr's span.
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    /// Was a `RangePatternBound` expected?
+    pub is_bound: bool,
+    /// The unexpected expr's precedence (used in match arm guard suggestions).
+    pub expr_precedence: ExprPrecedence,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum UnexpectedExpressionInPatternSugg {
+    #[multipart_suggestion(
+        parse_unexpected_expr_in_pat_create_guard_sugg,
+        applicability = "maybe-incorrect"
+    )]
+    CreateGuard {
+        /// Where to put the suggested identifier.
+        #[suggestion_part(code = "{ident}")]
+        ident_span: Span,
+        /// Where to put the match arm guard.
+        #[suggestion_part(code = " if {ident} == {expr}")]
+        pat_hi: Span,
+        /// The suggested identifier.
+        ident: String,
+        /// The unexpected expression.
+        expr: String,
+    },
+
+    #[multipart_suggestion(
+        parse_unexpected_expr_in_pat_update_guard_sugg,
+        applicability = "maybe-incorrect"
+    )]
+    UpdateGuard {
+        /// Where to put the suggested identifier.
+        #[suggestion_part(code = "{ident}")]
+        ident_span: Span,
+        /// The beginning of the match arm guard's expression (insert a `(` if `Some`).
+        #[suggestion_part(code = "(")]
+        guard_lo: Option<Span>,
+        /// The end of the match arm guard's expression.
+        #[suggestion_part(code = "{guard_hi_paren} && {ident} == {expr}")]
+        guard_hi: Span,
+        /// Either `")"` or `""`.
+        guard_hi_paren: &'static str,
+        /// The suggested identifier.
+        ident: String,
+        /// The unexpected expression.
+        expr: String,
+    },
+
+    #[multipart_suggestion(
+        parse_unexpected_expr_in_pat_const_sugg,
+        applicability = "has-placeholders"
+    )]
+    Const {
+        /// Where to put the extracted constant declaration.
+        #[suggestion_part(code = "{indentation}const {ident}: /* Type */ = {expr};\n")]
+        stmt_lo: Span,
+        /// Where to put the suggested identifier.
+        #[suggestion_part(code = "{ident}")]
+        ident_span: Span,
+        /// The suggested identifier.
+        ident: String,
+        /// The unexpected expression.
+        expr: String,
+        /// The statement's block's indentation.
+        indentation: String,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_paren_in_range_pat)]
+pub(crate) struct UnexpectedParenInRangePat {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: UnexpectedParenInRangePatSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_unexpected_paren_in_range_pat_sugg,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct UnexpectedParenInRangePatSugg {
+    #[suggestion_part(code = "")]
+    pub start_span: Span,
+    #[suggestion_part(code = "")]
+    pub end_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_return_types_use_thin_arrow)]
+pub(crate) struct ReturnTypesUseThinArrow {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(style = "verbose", code = " -> ", applicability = "machine-applicable")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_need_plus_after_trait_object_lifetime)]
+pub(crate) struct NeedPlusAfterTraitObjectLifetime {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = " + /* Trait */", applicability = "has-placeholders")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_mut_or_const_in_raw_pointer_type)]
+pub(crate) struct ExpectedMutOrConstInRawPointerType {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code("mut ", "const "), applicability = "has-placeholders", style = "verbose")]
+    pub after_asterisk: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_lifetime_after_mut)]
+pub(crate) struct LifetimeAfterMut {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "&{snippet} mut", applicability = "maybe-incorrect", style = "verbose")]
+    pub suggest_lifetime: Option<Span>,
+    pub snippet: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dyn_after_mut)]
+pub(crate) struct DynAfterMut {
+    #[primary_span]
+    #[suggestion(code = "&mut dyn", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_pointer_cannot_be_const)]
+#[note]
+pub(crate) struct FnPointerCannotBeConst {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_pointer_cannot_be_async)]
+#[note]
+pub(crate) struct FnPointerCannotBeAsync {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_nested_c_variadic_type, code = E0743)]
+pub(crate) struct NestedCVariadicType {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_dyn_keyword)]
+#[help]
+pub(crate) struct InvalidDynKeyword {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum HelpUseLatestEdition {
+    #[help(parse_help_set_edition_cargo)]
+    #[note(parse_note_edition_guide)]
+    Cargo { edition: Edition },
+    #[help(parse_help_set_edition_standalone)]
+    #[note(parse_note_edition_guide)]
+    Standalone { edition: Edition },
+}
+
+impl HelpUseLatestEdition {
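+    /// Picks the Cargo-specific or standalone edition help, depending on how
+    /// the compiler was invoked.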
+    pub(crate) fn new() -> Self {
+        let edition = LATEST_STABLE_EDITION;
+        if rustc_session::utils::was_invoked_from_cargo() {
+            Self::Cargo { edition }
+        } else {
+            Self::Standalone { edition }
+        }
+    }
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_box_syntax_removed)]
+pub(crate) struct BoxSyntaxRemoved {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: AddBoxNew,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_box_syntax_removed_suggestion,
+    applicability = "machine-applicable",
+    style = "verbose"
+)]
+pub(crate) struct AddBoxNew {
+    #[suggestion_part(code = "Box::new(")]
+    pub box_kw_and_lo: Span,
+    #[suggestion_part(code = ")")]
+    pub hi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bad_return_type_notation_output)]
+pub(crate) struct BadReturnTypeNotationOutput {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect", style = "verbose")]
+    pub suggestion: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bad_assoc_type_bounds)]
+pub(crate) struct BadAssocTypeBounds {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_attr_after_generic)]
+pub(crate) struct AttrAfterGeneric {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_attr_without_generics)]
+pub(crate) struct AttrWithoutGenerics {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_where_generics)]
+pub(crate) struct WhereOnGenerics {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_generics_in_path)]
+pub(crate) struct GenericsInPath {
+    #[primary_span]
+    pub span: Vec<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_lifetime_in_eq_constraint)]
+#[help]
+pub(crate) struct LifetimeInEqConstraint {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub lifetime: Ident,
+    #[label(parse_context_label)]
+    pub binding_label: Span,
+    #[suggestion(
+        parse_colon_sugg,
+        style = "verbose",
+        applicability = "maybe-incorrect",
+        code = ": "
+    )]
+    pub colon_sugg: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_modifier_lifetime)]
+pub(crate) struct ModifierLifetime {
+    #[primary_span]
+    #[suggestion(style = "tool-only", applicability = "maybe-incorrect", code = "")]
+    pub span: Span,
+    pub modifier: &'static str,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_parenthesized_lifetime_suggestion,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct RemoveParens {
+    #[suggestion_part(code = "")]
+    pub lo: Span,
+    #[suggestion_part(code = "")]
+    pub hi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_parenthesized_lifetime)]
+pub(crate) struct ParenthesizedLifetime {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: RemoveParens,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_underscore_literal_suffix)]
+pub(crate) struct UnderscoreLiteralSuffix {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expect_label_found_ident)]
+pub(crate) struct ExpectedLabelFoundIdent {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "'", applicability = "machine-applicable", style = "verbose")]
+    pub start: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inappropriate_default)]
+#[note]
+pub(crate) struct InappropriateDefault {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub article: &'static str,
+    pub descr: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_recover_import_as_use)]
+pub(crate) struct RecoverImportAsUse {
+    #[primary_span]
+    #[suggestion(code = "use", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+    pub token_name: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_single_colon_import_path)]
+#[note]
+pub(crate) struct SingleColonImportPath {
+    #[primary_span]
+    #[suggestion(code = "::", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bad_item_kind)]
+pub(crate) struct BadItemKind {
+    #[primary_span]
+    pub span: Span,
+    pub descr: &'static str,
+    pub ctx: &'static str,
+    #[help]
+    pub help: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_rules_missing_bang)]
+pub(crate) struct MacroRulesMissingBang {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "!", applicability = "machine-applicable", style = "verbose")]
+    pub hi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_name_remove_bang)]
+pub(crate) struct MacroNameRemoveBang {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable", style = "short")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_rules_visibility)]
+pub(crate) struct MacroRulesVisibility<'a> {
+    #[primary_span]
+    #[suggestion(code = "#[macro_export]", applicability = "maybe-incorrect", style = "verbose")]
+    pub span: Span,
+    pub vis: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_invocation_visibility)]
+#[help]
+pub(crate) struct MacroInvocationVisibility<'a> {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+    pub vis: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_nested_adt)]
+pub(crate) struct NestedAdt<'a> {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect", style = "verbose")]
+    pub item: Span,
+    pub keyword: &'a str,
+    pub kw_str: Cow<'a, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_function_body_equals_expr)]
+pub(crate) struct FunctionBodyEqualsExpr {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: FunctionBodyEqualsExprSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct FunctionBodyEqualsExprSugg {
+    #[suggestion_part(code = "{{")]
+    pub eq: Span,
+    #[suggestion_part(code = " }}")]
+    pub semi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_box_not_pat)]
+pub(crate) struct BoxNotPat {
+    #[primary_span]
+    pub span: Span,
+    #[note]
+    pub kw: Span,
+    #[suggestion(code = "r#", applicability = "maybe-incorrect", style = "verbose")]
+    pub lo: Span,
+    pub descr: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unmatched_angle)]
+pub(crate) struct UnmatchedAngle {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable", style = "verbose")]
+    pub span: Span,
+    pub plural: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_plus_in_bounds)]
+pub(crate) struct MissingPlusBounds {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = " +", applicability = "maybe-incorrect", style = "verbose")]
+    pub hi: Span,
+    pub sym: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_parens_trait_bounds)]
+pub(crate) struct IncorrectParensTraitBounds {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: IncorrectParensTraitBoundsSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_incorrect_parens_trait_bounds_sugg,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct IncorrectParensTraitBoundsSugg {
+    #[suggestion_part(code = " ")]
+    pub wrong_span: Span,
+    #[suggestion_part(code = "(")]
+    pub new_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_kw_bad_case)]
+pub(crate) struct KwBadCase<'a> {
+    #[primary_span]
+    #[suggestion(code = "{kw}", style = "verbose", applicability = "machine-applicable")]
+    pub span: Span,
+    pub kw: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_meta_bad_delim)]
+pub(crate) struct MetaBadDelim {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: MetaBadDelimSugg,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cfg_attr_bad_delim)]
+pub(crate) struct CfgAttrBadDelim {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: MetaBadDelimSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_meta_bad_delim_suggestion, applicability = "machine-applicable")]
+pub(crate) struct MetaBadDelimSugg {
+    #[suggestion_part(code = "(")]
+    pub open: Span,
+    #[suggestion_part(code = ")")]
+    pub close: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_malformed_cfg_attr)]
+#[note]
+pub(crate) struct MalformedCfgAttr {
+    #[primary_span]
+    #[suggestion(style = "verbose", code = "{sugg}")]
+    pub span: Span,
+    pub sugg: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_builtin_construct)]
+pub(crate) struct UnknownBuiltinConstruct {
+    #[primary_span]
+    pub span: Span,
+    pub name: Ident,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_builtin_ident)]
+pub(crate) struct ExpectedBuiltinIdent {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_static_with_generics)]
+pub(crate) struct StaticWithGenerics {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_where_clause_before_const_body)]
+pub(crate) struct WhereClauseBeforeConstBody {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[label(parse_name_label)]
+    pub name: Span,
+    #[label(parse_body_label)]
+    pub body: Span,
+    #[subdiagnostic]
+    pub sugg: Option<WhereClauseBeforeConstBodySugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WhereClauseBeforeConstBodySugg {
+    #[suggestion_part(code = "= {snippet} ")]
+    pub left: Span,
+    pub snippet: String,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_generic_args_in_pat_require_turbofish_syntax)]
+pub(crate) struct GenericArgsInPatRequireTurbofishSyntax {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(
+        parse_sugg_turbofish_syntax,
+        style = "verbose",
+        code = "::",
+        applicability = "maybe-incorrect"
+    )]
+    pub suggest_turbofish: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_transpose_dyn_or_impl)]
+pub(crate) struct TransposeDynOrImpl<'a> {
+    #[primary_span]
+    pub span: Span,
+    pub kw: &'a str,
+    #[subdiagnostic]
+    pub sugg: TransposeDynOrImplSugg<'a>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct TransposeDynOrImplSugg<'a> {
+    #[suggestion_part(code = "")]
+    pub removal_span: Span,
+    #[suggestion_part(code = "{kw} ")]
+    pub insertion_span: Span,
+    pub kw: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_array_index_offset_of)]
+pub(crate) struct ArrayIndexInOffsetOf(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_offset_of)]
+pub(crate) struct InvalidOffsetOf(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_async_impl)]
+pub(crate) struct AsyncImpl {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expr_rarrow_call)]
+#[help]
+pub(crate) struct ExprRArrowCall {
+    #[primary_span]
+    #[suggestion(style = "verbose", applicability = "machine-applicable", code = ".")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_range_attribute)]
+pub(crate) struct DotDotRangeAttribute {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_attr_unsafe)]
+#[note]
+pub(crate) struct InvalidAttrUnsafe {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub name: Path,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unsafe_attr_outside_unsafe)]
+pub(crate) struct UnsafeAttrOutsideUnsafe {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub suggestion: UnsafeAttrOutsideUnsafeSuggestion,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_unsafe_attr_outside_unsafe_suggestion,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct UnsafeAttrOutsideUnsafeSuggestion {
+    #[suggestion_part(code = "unsafe(")]
+    pub left: Span,
+    #[suggestion_part(code = ")")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_binder_before_modifiers)]
+pub(crate) struct BinderBeforeModifiers {
+    #[primary_span]
+    pub binder_span: Span,
+    #[label]
+    pub modifiers_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_binder_and_polarity)]
+pub(crate) struct BinderAndPolarity {
+    #[primary_span]
+    pub polarity_span: Span,
+    #[label]
+    pub binder_span: Span,
+    pub polarity: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_modifiers_and_polarity)]
+pub(crate) struct PolarityAndModifiers {
+    #[primary_span]
+    pub polarity_span: Span,
+    #[label]
+    pub modifiers_span: Span,
+    pub polarity: &'static str,
+    pub modifiers_concatenated: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_type_on_self)]
+pub(crate) struct IncorrectTypeOnSelf {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub move_self_modifier: MoveSelfModifier,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct MoveSelfModifier {
+    #[suggestion_part(code = "")]
+    pub removal_span: Span,
+    #[suggestion_part(code = "{modifier}")]
+    pub insertion_span: Span,
+    pub modifier: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_asm_unsupported_operand)]
+pub(crate) struct AsmUnsupportedOperand<'a> {
+    #[primary_span]
+    #[label]
+    pub(crate) span: Span,
+    pub(crate) symbol: &'a str,
+    pub(crate) macro_name: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_asm_underscore_input)]
+pub(crate) struct AsmUnderscoreInput {
+    #[primary_span]
+    pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_asm_sym_no_path)]
+pub(crate) struct AsmSymNoPath {
+    #[primary_span]
+    pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_asm_requires_template)]
+pub(crate) struct AsmRequiresTemplate {
+    #[primary_span]
+    pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_asm_expected_comma)]
+pub(crate) struct AsmExpectedComma {
+    #[primary_span]
+    #[label]
+    pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_asm_expected_other)]
+pub(crate) struct AsmExpectedOther {
+    #[primary_span]
+    #[label(parse_asm_expected_other)]
+    pub(crate) span: Span,
+    pub(crate) is_inline_asm: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_asm_non_abi)]
+pub(crate) struct NonABI {
+    #[primary_span]
+    pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_asm_expected_string_literal)]
+pub(crate) struct AsmExpectedStringLiteral {
+    #[primary_span]
+    #[label]
+    pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_asm_expected_register_class_or_explicit_register)]
+pub(crate) struct ExpectedRegisterClassOrExplicitRegister {
+    #[primary_span]
+    pub(crate) span: Span,
+}
diff --git a/compiler/rustc_parse/src/lexer/diagnostics.rs b/compiler/rustc_parse/src/lexer/diagnostics.rs
new file mode 100644
index 00000000000..947f3df179f
--- /dev/null
+++ b/compiler/rustc_parse/src/lexer/diagnostics.rs
@@ -0,0 +1,154 @@
+use rustc_ast::token::Delimiter;
+use rustc_errors::Diag;
+use rustc_session::parse::ParseSess;
+use rustc_span::Span;
+use rustc_span::source_map::SourceMap;
+
+use super::UnmatchedDelim;
+use crate::errors::MismatchedClosingDelimiter;
+use crate::pprust;
+
+#[derive(Default)]
+pub(super) struct TokenTreeDiagInfo {
+    /// Stack of open delimiters and their spans. Used for error messages.
+    pub open_delimiters: Vec<(Delimiter, Span)>,
+    pub unmatched_delims: Vec<UnmatchedDelim>,
+
+    /// Used only for error recovery when arriving at EOF with mismatched braces.
+    pub last_unclosed_found_span: Option<Span>,
+
+    /// Collect empty block spans that might have been auto-inserted by editors.
+    pub empty_block_spans: Vec<Span>,
+
+    /// Collects the spans of matching brace pairs (open, close). Used only
+    /// for detecting whether blocks are empty and consist solely of braces.
+    pub matching_block_spans: Vec<(Span, Span)>,
+}
+
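+/// Returns `true` if the two spans start at the same indentation level
+/// (i.e. have the same left margin), which suggests that `open_sp` and
+/// `close_sp` are meant to match each other.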
+pub(super) fn same_indentation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
+    match (sm.span_to_margin(open_sp), sm.span_to_margin(close_sp)) {
+        (Some(open_padding), Some(close_padding)) => open_padding == close_padding,
+        _ => false,
+    }
+}
+
+// When we get a `)` or `]` where a `}` was expected (i.e. for an open `{`), emit a help
+// message here; it's friendlier than reporting an `unmatched delimiter` error in a later phase.
+pub(super) fn report_missing_open_delim(
+    err: &mut Diag<'_>,
+    unmatched_delims: &mut Vec<UnmatchedDelim>,
+) -> bool {
+    let mut reported_missing_open = false;
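+    // Entries handled here are removed from `unmatched_delims` (the closure returns `false`
+    // for them); everything else is kept for later reporting.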
+    unmatched_delims.retain(|unmatch_brace| {
+        if let Some(delim) = unmatch_brace.found_delim
+            && matches!(delim, Delimiter::Parenthesis | Delimiter::Bracket)
+        {
+            let missed_open = match delim {
+                Delimiter::Parenthesis => "(",
+                Delimiter::Bracket => "[",
+                _ => unreachable!(),
+            };
+
+            if let Some(unclosed_span) = unmatch_brace.unclosed_span {
+                err.span_label(unclosed_span, "the nearest open delimiter");
+            }
+            err.span_label(
+                unmatch_brace.found_span.shrink_to_lo(),
+                format!("missing open `{missed_open}` for this delimiter"),
+            );
+            reported_missing_open = true;
+            false
+        } else {
+            true
+        }
+    });
+    reported_missing_open
+}
+
+pub(super) fn report_suspicious_mismatch_block(
+    err: &mut Diag<'_>,
+    diag_info: &TokenTreeDiagInfo,
+    sm: &SourceMap,
+    delim: Delimiter,
+) {
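+    // Pair each matched block (from its opening delimiter up to the start of its closing
+    // delimiter) with whether the two delimiters sit at the same indentation level.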
+    let mut matched_spans: Vec<(Span, bool)> = diag_info
+        .matching_block_spans
+        .iter()
+        .map(|&(open, close)| (open.with_hi(close.lo()), same_indentation_level(sm, open, close)))
+        .collect();
+
+    // Sort by `lo`, so that the larger (outer) block spans come first.
+    matched_spans.sort_by_key(|(span, _)| span.lo());
+
+    // If an outer block is well indented (its delimiters line up), treat the blocks nested
+    // inside it as well indented too, so they are not flagged as suspicious.
+    // This is O(N^2), but we are on the error reporting path, so it is fine.
+    for i in 0..matched_spans.len() {
+        let (block_span, same_ident) = matched_spans[i];
+        if same_ident {
+            for j in i + 1..matched_spans.len() {
+                let (inner_block, inner_same_ident) = matched_spans[j];
+                if block_span.contains(inner_block) && !inner_same_ident {
+                    matched_spans[j] = (inner_block, true);
+                }
+            }
+        }
+    }
+
+    // Find the innermost suspicious span as the candidate for the final report.
+    let candidate_span =
+        matched_spans.into_iter().rev().find(|&(_, same_ident)| !same_ident).map(|(span, _)| span);
+
+    if let Some(block_span) = candidate_span {
+        err.span_label(block_span.shrink_to_lo(), "this delimiter might not be properly closed...");
+        err.span_label(
+            block_span.shrink_to_hi(),
+            "...as it matches this but it has different indentation",
+        );
+
+        // If there is an empty block within the mismatched span, note it.
+        if delim == Delimiter::Brace {
+            for span in diag_info.empty_block_spans.iter() {
+                if block_span.contains(*span) {
+                    err.span_label(*span, "block is empty, you might have not meant to close it");
+                    break;
+                }
+            }
+        }
+    } else {
+        // If there is no suspicious span, pointing at the last properly closed block may help.
+        if let Some(parent) = diag_info.matching_block_spans.last()
+            && diag_info.open_delimiters.last().is_none()
+            && diag_info.empty_block_spans.iter().all(|&sp| sp != parent.0.to(parent.1))
+        {
+            err.span_label(parent.0, "this opening brace...");
+            err.span_label(parent.1, "...matches this closing brace");
+        }
+    }
+}
+
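+/// Creates one `MismatchedClosingDelimiter` error for each unmatched closing
+/// delimiter, skipping entries whose delimiter is `None` (an EOF), since those
+/// are reported elsewhere.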
+pub(crate) fn make_errors_for_mismatched_closing_delims<'psess>(
+    unmatcheds: &[UnmatchedDelim],
+    psess: &'psess ParseSess,
+) -> Vec<Diag<'psess>> {
+    unmatcheds
+        .iter()
+        .filter_map(|unmatched| {
+            // `None` here means an `Eof` was found. We already emit those errors elsewhere; they
+            // are added to `unmatched_delims` only for error recovery in the `Parser`.
+            let found_delim = unmatched.found_delim?;
+            let mut spans = vec![unmatched.found_span];
+            if let Some(sp) = unmatched.unclosed_span {
+                spans.push(sp);
+            };
+            let err = psess.dcx().create_err(MismatchedClosingDelimiter {
+                spans,
+                delimiter: pprust::token_kind_to_string(&found_delim.as_close_token_kind())
+                    .to_string(),
+                unmatched: unmatched.found_span,
+                opening_candidate: unmatched.candidate_span,
+                unclosed: unmatched.unclosed_span,
+            });
+            Some(err)
+        })
+        .collect()
+}
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
new file mode 100644
index 00000000000..85af5a615ae
--- /dev/null
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -0,0 +1,1145 @@
+use diagnostics::make_errors_for_mismatched_closing_delims;
+use rustc_ast::ast::{self, AttrStyle};
+use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
+use rustc_ast::tokenstream::TokenStream;
+use rustc_ast::util::unicode::{TEXT_FLOW_CONTROL_CHARS, contains_text_flow_control_chars};
+use rustc_errors::codes::*;
+use rustc_errors::{Applicability, Diag, DiagCtxtHandle, StashKey};
+use rustc_lexer::{
+    Base, Cursor, DocStyle, FrontmatterAllowed, LiteralKind, RawStrError, is_whitespace,
+};
+use rustc_literal_escaper::{EscapeError, Mode, check_for_errors};
+use rustc_session::lint::BuiltinLintDiag;
+use rustc_session::lint::builtin::{
+    RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
+    TEXT_DIRECTION_CODEPOINT_IN_COMMENT, TEXT_DIRECTION_CODEPOINT_IN_LITERAL,
+};
+use rustc_session::parse::ParseSess;
+use rustc_span::{BytePos, Pos, Span, Symbol, sym};
+use tracing::debug;
+
+use crate::errors;
+use crate::lexer::diagnostics::TokenTreeDiagInfo;
+use crate::lexer::unicode_chars::UNICODE_ARRAY;
+
+mod diagnostics;
+mod tokentrees;
+mod unescape_error_reporting;
+mod unicode_chars;
+
+use unescape_error_reporting::{emit_unescape_error, escaped_char};
+
+// This type is used a lot. Make sure it doesn't unintentionally get bigger.
+//
+// This assertion is in this crate, rather than in `rustc_lexer`, because that
+// crate cannot depend on `rustc_data_structures`.
+#[cfg(target_pointer_width = "64")]
+rustc_data_structures::static_assert_size!(rustc_lexer::Token, 12);
+
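+/// A delimiter that could not be matched while lexing token trees, together with
+/// the spans needed to report it and to recover in the parser. `found_delim` is
+/// `None` when the mismatch was caused by reaching end of file.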
+#[derive(Clone, Debug)]
+pub(crate) struct UnmatchedDelim {
+    pub found_delim: Option<Delimiter>,
+    pub found_span: Span,
+    pub unclosed_span: Option<Span>,
+    pub candidate_span: Option<Span>,
+}
+
+pub(crate) fn lex_token_trees<'psess, 'src>(
+    psess: &'psess ParseSess,
+    mut src: &'src str,
+    mut start_pos: BytePos,
+    override_span: Option<Span>,
+) -> Result<TokenStream, Vec<Diag<'psess>>> {
+    // Skip `#!`, if present.
+    if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
+        src = &src[shebang_len..];
+        start_pos = start_pos + BytePos::from_usize(shebang_len);
+    }
+
+    let cursor = Cursor::new(src, FrontmatterAllowed::Yes);
+    let mut lexer = Lexer {
+        psess,
+        start_pos,
+        pos: start_pos,
+        src,
+        cursor,
+        override_span,
+        nbsp_is_whitespace: false,
+        last_lifetime: None,
+        token: Token::dummy(),
+        diag_info: TokenTreeDiagInfo::default(),
+    };
+    let res = lexer.lex_token_trees(/* is_delimited */ false);
+
+    let mut unmatched_closing_delims: Vec<_> =
+        make_errors_for_mismatched_closing_delims(&lexer.diag_info.unmatched_delims, psess);
+
+    match res {
+        Ok((_open_spacing, stream)) => {
+            if unmatched_closing_delims.is_empty() {
+                Ok(stream)
+            } else {
+                // Return an error if there are any unmatched or unclosed delimiters.
+                Err(unmatched_closing_delims)
+            }
+        }
+        Err(errs) => {
+            // Emit the delimiter mismatch errors first, then the unclosed delimiter errors,
+            // because a delimiter mismatch is more likely to be the root cause of the error.
+            unmatched_closing_delims.extend(errs);
+            Err(unmatched_closing_delims)
+        }
+    }
+}
+
+struct Lexer<'psess, 'src> {
+    psess: &'psess ParseSess,
+    /// Initial position, read-only.
+    start_pos: BytePos,
+    /// The absolute offset within the source_map of the current character.
+    pos: BytePos,
+    /// Source text to tokenize.
+    src: &'src str,
+    /// Cursor for getting lexer tokens.
+    cursor: Cursor<'src>,
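+    /// If set, spans produced by the lexer (see `mk_sp`) use this span instead
+    /// of the actual source positions.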
+    override_span: Option<Span>,
+    /// When an "unknown start of token: \u{a0}" error has already been emitted
+    /// earlier in this file, it's safe to treat further occurrences of the
+    /// non-breaking space character as whitespace.
+    nbsp_is_whitespace: bool,
+
+    /// Tracks the `Span` of the leading `'` of the last lifetime. Used in
+    /// diagnostics to detect a possible typo where `"` was meant.
+    last_lifetime: Option<Span>,
+
+    /// The current token.
+    token: Token,
+
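+    /// Diagnostic state gathered while lexing token trees: open delimiters,
+    /// unmatched delimiters, and block spans used by the mismatch heuristics.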
+    diag_info: TokenTreeDiagInfo,
+}
+
+impl<'psess, 'src> Lexer<'psess, 'src> {
+    fn dcx(&self) -> DiagCtxtHandle<'psess> {
+        self.psess.dcx()
+    }
+
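+    /// Creates a span from `lo` to `hi` with root context, unless `override_span`
+    /// is set, in which case that span is returned instead.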
+    fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
+        self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
+    }
+
+    /// Returns the next token, paired with a bool indicating if the token was
+    /// preceded by whitespace.
+    fn next_token_from_cursor(&mut self) -> (Token, bool) {
+        let mut preceded_by_whitespace = false;
+        let mut swallow_next_invalid = 0;
+        // Skip trivial (whitespace & comments) tokens
+        loop {
+            let str_before = self.cursor.as_str();
+            let token = self.cursor.advance_token();
+            let start = self.pos;
+            self.pos = self.pos + BytePos(token.len);
+
+            debug!("next_token: {:?}({:?})", token.kind, self.str_from(start));
+
+            if let rustc_lexer::TokenKind::Semi
+            | rustc_lexer::TokenKind::LineComment { .. }
+            | rustc_lexer::TokenKind::BlockComment { .. }
+            | rustc_lexer::TokenKind::CloseParen
+            | rustc_lexer::TokenKind::CloseBrace
+            | rustc_lexer::TokenKind::CloseBracket = token.kind
+            {
+                // Heuristic: we assume that it is unlikely we're dealing with an unterminated
+                // string surrounded by single quotes.
+                self.last_lifetime = None;
+            }
+
+            // Now "cook" the token, converting the simple `rustc_lexer::TokenKind` enum into a
+            // rich `rustc_ast::TokenKind`. This turns strings into interned symbols and runs
+            // additional validation.
+            let kind = match token.kind {
+                rustc_lexer::TokenKind::LineComment { doc_style } => {
+                    // Skip non-doc comments
+                    let Some(doc_style) = doc_style else {
+                        self.lint_unicode_text_flow(start);
+                        preceded_by_whitespace = true;
+                        continue;
+                    };
+
+                    // The opening delimiter (3 characters long) is not included in the symbol.
+                    let content_start = start + BytePos(3);
+                    let content = self.str_from(content_start);
+                    self.lint_doc_comment_unicode_text_flow(start, content);
+                    self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style)
+                }
+                rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => {
+                    if !terminated {
+                        self.report_unterminated_block_comment(start, doc_style);
+                    }
+
+                    // Skip non-doc comments
+                    let Some(doc_style) = doc_style else {
+                        self.lint_unicode_text_flow(start);
+                        preceded_by_whitespace = true;
+                        continue;
+                    };
+
+                    // The opening delimiter of length 3 and the closing delimiter of length 2
+                    // are not included in the symbol.
+                    let content_start = start + BytePos(3);
+                    let content_end = self.pos - BytePos(if terminated { 2 } else { 0 });
+                    let content = self.str_from_to(content_start, content_end);
+                    self.lint_doc_comment_unicode_text_flow(start, content);
+                    self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style)
+                }
+                rustc_lexer::TokenKind::Frontmatter {
+                    has_invalid_preceding_whitespace,
+                    invalid_infostring,
+                } => {
+                    self.validate_frontmatter(
+                        start,
+                        has_invalid_preceding_whitespace,
+                        invalid_infostring,
+                    );
+                    preceded_by_whitespace = true;
+                    continue;
+                }
+                rustc_lexer::TokenKind::Whitespace => {
+                    preceded_by_whitespace = true;
+                    continue;
+                }
+                rustc_lexer::TokenKind::Ident => self.ident(start),
+                rustc_lexer::TokenKind::RawIdent => {
+                    let sym = nfc_normalize(self.str_from(start + BytePos(2)));
+                    let span = self.mk_sp(start, self.pos);
+                    self.psess.symbol_gallery.insert(sym, span);
+                    if !sym.can_be_raw() {
+                        self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
+                    }
+                    self.psess.raw_identifier_spans.push(span);
+                    token::Ident(sym, IdentIsRaw::Yes)
+                }
+                rustc_lexer::TokenKind::UnknownPrefix => {
+                    self.report_unknown_prefix(start);
+                    self.ident(start)
+                }
+                rustc_lexer::TokenKind::UnknownPrefixLifetime => {
+                    self.report_unknown_prefix(start);
+                    // Include the leading `'` in the real identifier, for macro
+                    // expansion purposes. See #12512 for the gory details of why
+                    // this is necessary.
+                    let lifetime_name = self.str_from(start);
+                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));
+                    let ident = Symbol::intern(lifetime_name);
+                    token::Lifetime(ident, IdentIsRaw::No)
+                }
+                rustc_lexer::TokenKind::InvalidIdent
+                    // Do not recover an identifier with emoji if the codepoint is confusable
+                    // with a recoverable substitution token, like `➖`.
+                    if !UNICODE_ARRAY.iter().any(|&(c, _, _)| {
+                        let sym = self.str_from(start);
+                        sym.chars().count() == 1 && c == sym.chars().next().unwrap()
+                    }) =>
+                {
+                    let sym = nfc_normalize(self.str_from(start));
+                    let span = self.mk_sp(start, self.pos);
+                    self.psess
+                        .bad_unicode_identifiers
+                        .borrow_mut()
+                        .entry(sym)
+                        .or_default()
+                        .push(span);
+                    token::Ident(sym, IdentIsRaw::No)
+                }
+                // Split up (raw) C string literals into an ident and a string literal when the
+                // edition is earlier than 2021.
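+                // Illustrative example (not from the original source): under edition 2018,
+                //
+                //     let s = c"hi";
+                //
+                // is lexed as the identifier `c` followed by the string literal `"hi"` rather
+                // than as a single C string literal token; the cursor reset below rewinds to
+                // just past the prefix so that older code and macro input keep their meaning.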
+                rustc_lexer::TokenKind::Literal {
+                    kind: kind @ (LiteralKind::CStr { .. } | LiteralKind::RawCStr { .. }),
+                    suffix_start: _,
+                } if !self.mk_sp(start, self.pos).edition().at_least_rust_2021() => {
+                    let prefix_len = match kind {
+                        LiteralKind::CStr { .. } => 1,
+                        LiteralKind::RawCStr { .. } => 2,
+                        _ => unreachable!(),
+                    };
+
+                    // Reset the state so that only the prefix ("c" or "cr")
+                    // has been consumed.
+                    let lit_start = start + BytePos(prefix_len);
+                    self.pos = lit_start;
+                    self.cursor = Cursor::new(&str_before[prefix_len as usize..], FrontmatterAllowed::No);
+                    self.report_unknown_prefix(start);
+                    let prefix_span = self.mk_sp(start, lit_start);
+                    return (Token::new(self.ident(start), prefix_span), preceded_by_whitespace);
+                }
+                rustc_lexer::TokenKind::GuardedStrPrefix => {
+                    self.maybe_report_guarded_str(start, str_before)
+                }
+                rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
+                    let suffix_start = start + BytePos(suffix_start);
+                    let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind);
+                    let suffix = if suffix_start < self.pos {
+                        let string = self.str_from(suffix_start);
+                        if string == "_" {
+                            self.dcx().emit_err(errors::UnderscoreLiteralSuffix {
+                                span: self.mk_sp(suffix_start, self.pos),
+                            });
+                            None
+                        } else {
+                            Some(Symbol::intern(string))
+                        }
+                    } else {
+                        None
+                    };
+                    self.lint_literal_unicode_text_flow(symbol, kind, self.mk_sp(start, self.pos), "literal");
+                    token::Literal(token::Lit { kind, symbol, suffix })
+                }
+                rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
+                    // Include the leading `'` in the real identifier, for macro
+                    // expansion purposes. See #12512 for the gory details of why
+                    // this is necessary.
+                    let lifetime_name = self.str_from(start);
+                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));
+                    if starts_with_number {
+                        let span = self.mk_sp(start, self.pos);
+                        self.dcx()
+                            .struct_err("lifetimes cannot start with a number")
+                            .with_span(span)
+                            .stash(span, StashKey::LifetimeIsChar);
+                    }
+                    let ident = Symbol::intern(lifetime_name);
+                    token::Lifetime(ident, IdentIsRaw::No)
+                }
+                rustc_lexer::TokenKind::RawLifetime => {
+                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));
+
+                    let ident_start = start + BytePos(3);
+                    let prefix_span = self.mk_sp(start, ident_start);
+
+                    if prefix_span.at_least_rust_2021() {
+                        // If the raw lifetime is followed by \', then treat it as a normal
+                        // lifetime followed by a \', i.e. interpret it as a character
+                        // literal. In this case it is always an invalid character literal,
+                        // since the literal must necessarily have >3 characters (`r#...`)
+                        // inside of it, which is invalid.
+                        if self.cursor.as_str().starts_with('\'') {
+                            let lit_span = self.mk_sp(start, self.pos + BytePos(1));
+                            let contents = self.str_from_to(start + BytePos(1), self.pos);
+                            emit_unescape_error(
+                                self.dcx(),
+                                contents,
+                                lit_span,
+                                lit_span,
+                                Mode::Char,
+                                0..contents.len(),
+                                EscapeError::MoreThanOneChar,
+                            )
+                            .expect("expected error");
+                        }
+
+                        let span = self.mk_sp(start, self.pos);
+
+                        let lifetime_name_without_tick =
+                            Symbol::intern(&self.str_from(ident_start));
+                        if !lifetime_name_without_tick.can_be_raw() {
+                            self.dcx().emit_err(
+                                errors::CannotBeRawLifetime {
+                                    span,
+                                    ident: lifetime_name_without_tick
+                                }
+                            );
+                        }
+
+                        // Put the `'` back onto the lifetime name.
+                        let mut lifetime_name =
+                            String::with_capacity(lifetime_name_without_tick.as_str().len() + 1);
+                        lifetime_name.push('\'');
+                        lifetime_name += lifetime_name_without_tick.as_str();
+                        let sym = Symbol::intern(&lifetime_name);
+
+                        // Make sure we mark this as a raw identifier.
+                        self.psess.raw_identifier_spans.push(span);
+
+                        token::Lifetime(sym, IdentIsRaw::Yes)
+                    } else {
+                        // Otherwise, this should be parsed like `'r`. Warn about it though.
+                        self.psess.buffer_lint(
+                            RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
+                            prefix_span,
+                            ast::CRATE_NODE_ID,
+                            BuiltinLintDiag::RawPrefix(prefix_span),
+                        );
+
+                        // Reset the state so we just lex the `'r`.
+                        let lt_start = start + BytePos(2);
+                        self.pos = lt_start;
+                        self.cursor = Cursor::new(&str_before[2..], FrontmatterAllowed::No);
+
+                        let lifetime_name = self.str_from(start);
+                        let ident = Symbol::intern(lifetime_name);
+                        token::Lifetime(ident, IdentIsRaw::No)
+                    }
+                }
+                rustc_lexer::TokenKind::Semi => token::Semi,
+                rustc_lexer::TokenKind::Comma => token::Comma,
+                rustc_lexer::TokenKind::Dot => token::Dot,
+                rustc_lexer::TokenKind::OpenParen => token::OpenParen,
+                rustc_lexer::TokenKind::CloseParen => token::CloseParen,
+                rustc_lexer::TokenKind::OpenBrace => token::OpenBrace,
+                rustc_lexer::TokenKind::CloseBrace => token::CloseBrace,
+                rustc_lexer::TokenKind::OpenBracket => token::OpenBracket,
+                rustc_lexer::TokenKind::CloseBracket => token::CloseBracket,
+                rustc_lexer::TokenKind::At => token::At,
+                rustc_lexer::TokenKind::Pound => token::Pound,
+                rustc_lexer::TokenKind::Tilde => token::Tilde,
+                rustc_lexer::TokenKind::Question => token::Question,
+                rustc_lexer::TokenKind::Colon => token::Colon,
+                rustc_lexer::TokenKind::Dollar => token::Dollar,
+                rustc_lexer::TokenKind::Eq => token::Eq,
+                rustc_lexer::TokenKind::Bang => token::Bang,
+                rustc_lexer::TokenKind::Lt => token::Lt,
+                rustc_lexer::TokenKind::Gt => token::Gt,
+                rustc_lexer::TokenKind::Minus => token::Minus,
+                rustc_lexer::TokenKind::And => token::And,
+                rustc_lexer::TokenKind::Or => token::Or,
+                rustc_lexer::TokenKind::Plus => token::Plus,
+                rustc_lexer::TokenKind::Star => token::Star,
+                rustc_lexer::TokenKind::Slash => token::Slash,
+                rustc_lexer::TokenKind::Caret => token::Caret,
+                rustc_lexer::TokenKind::Percent => token::Percent,
+
+                rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
+                    // Don't emit diagnostics for sequences of the same invalid token
+                    if swallow_next_invalid > 0 {
+                        swallow_next_invalid -= 1;
+                        continue;
+                    }
+                    let mut it = self.str_from_to_end(start).chars();
+                    let c = it.next().unwrap();
+                    if c == '\u{00a0}' {
+                        // If an error has already been reported on non-breaking
+                        // space characters earlier in the file, treat all
+                        // subsequent occurrences as whitespace.
+                        if self.nbsp_is_whitespace {
+                            preceded_by_whitespace = true;
+                            continue;
+                        }
+                        self.nbsp_is_whitespace = true;
+                    }
+                    let repeats = it.take_while(|c1| *c1 == c).count();
+                    // FIXME: instead of keeping a table in `check_for_substitution`, the lexer
+                    // could turn unicode homoglyphs into the token for their ASCII version.
+                    // Ideally, this should live inside `rustc_lexer`. However, we should first
+                    // remove compound tokens like `<<` from `rustc_lexer`, and then add fancier
+                    // error recovery to it, as there will be less overall work to do this way.
+                    let (token, sugg) =
+                        unicode_chars::check_for_substitution(self, start, c, repeats + 1);
+                    self.dcx().emit_err(errors::UnknownTokenStart {
+                        span: self.mk_sp(start, self.pos + Pos::from_usize(repeats * c.len_utf8())),
+                        escaped: escaped_char(c),
+                        sugg,
+                        null: if c == '\x00' { Some(errors::UnknownTokenNull) } else { None },
+                        repeat: if repeats > 0 {
+                            swallow_next_invalid = repeats;
+                            Some(errors::UnknownTokenRepeat { repeats })
+                        } else {
+                            None
+                        },
+                    });
+
+                    if let Some(token) = token {
+                        token
+                    } else {
+                        preceded_by_whitespace = true;
+                        continue;
+                    }
+                }
+                rustc_lexer::TokenKind::Eof => token::Eof,
+            };
+            let span = self.mk_sp(start, self.pos);
+            return (Token::new(kind, span), preceded_by_whitespace);
+        }
+    }
+
+    fn ident(&self, start: BytePos) -> TokenKind {
+        let sym = nfc_normalize(self.str_from(start));
+        let span = self.mk_sp(start, self.pos);
+        self.psess.symbol_gallery.insert(sym, span);
+        token::Ident(sym, IdentIsRaw::No)
+    }
+
+    /// Detect uses of Unicode codepoints that change the direction of the text on screen and
+    /// loudly complain about them.
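+    ///
+    /// Illustrative example (not from the original source): a comment like
+    /// `// check if admin\u{202e}` contains U+202E (RIGHT-TO-LEFT OVERRIDE), one of the
+    /// bidirectional override codepoints that can make the displayed source differ from what
+    /// the compiler actually sees ("Trojan Source"), so a lint is buffered for it.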
+    fn lint_unicode_text_flow(&self, start: BytePos) {
+        // The opening delimiter of length 2 is not included in the comment text.
+        let content_start = start + BytePos(2);
+        let content = self.str_from(content_start);
+        if contains_text_flow_control_chars(content) {
+            let span = self.mk_sp(start, self.pos);
+            self.psess.buffer_lint(
+                TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
+                span,
+                ast::CRATE_NODE_ID,
+                BuiltinLintDiag::UnicodeTextFlow(span, content.to_string()),
+            );
+        }
+    }
+
+    fn lint_doc_comment_unicode_text_flow(&mut self, start: BytePos, content: &str) {
+        if contains_text_flow_control_chars(content) {
+            self.report_text_direction_codepoint(
+                content,
+                self.mk_sp(start, self.pos),
+                0,
+                false,
+                "doc comment",
+            );
+        }
+    }
+
+    fn lint_literal_unicode_text_flow(
+        &mut self,
+        text: Symbol,
+        lit_kind: token::LitKind,
+        span: Span,
+        label: &'static str,
+    ) {
+        if !contains_text_flow_control_chars(text.as_str()) {
+            return;
+        }
+        let (padding, point_at_inner_spans) = match lit_kind {
+            // account for `"` or `'`
+            token::LitKind::Str | token::LitKind::Char => (1, true),
+            // account for `c"`
+            token::LitKind::CStr => (2, true),
+            // account for `r###"`
+            token::LitKind::StrRaw(n) => (n as u32 + 2, true),
+            // account for `cr###"`
+            token::LitKind::CStrRaw(n) => (n as u32 + 3, true),
+            // suppress bad literals.
+            token::LitKind::Err(_) => return,
+            // Be conservative just in case new literals do support these.
+            _ => (0, false),
+        };
+        self.report_text_direction_codepoint(
+            text.as_str(),
+            span,
+            padding,
+            point_at_inner_spans,
+            label,
+        );
+    }
+
+    fn report_text_direction_codepoint(
+        &self,
+        text: &str,
+        span: Span,
+        padding: u32,
+        point_at_inner_spans: bool,
+        label: &str,
+    ) {
+        // Obtain the `Span`s for each of the forbidden chars.
+        let spans: Vec<_> = text
+            .char_indices()
+            .filter_map(|(i, c)| {
+                TEXT_FLOW_CONTROL_CHARS.contains(&c).then(|| {
+                    let lo = span.lo() + BytePos(i as u32 + padding);
+                    (c, span.with_lo(lo).with_hi(lo + BytePos(c.len_utf8() as u32)))
+                })
+            })
+            .collect();
+
+        let count = spans.len();
+        let labels = point_at_inner_spans.then_some(spans.clone());
+
+        self.psess.buffer_lint(
+            TEXT_DIRECTION_CODEPOINT_IN_LITERAL,
+            span,
+            ast::CRATE_NODE_ID,
+            BuiltinLintDiag::HiddenUnicodeCodepoints {
+                label: label.to_string(),
+                count,
+                span_label: span,
+                labels,
+                escape: point_at_inner_spans && !spans.is_empty(),
+                spans,
+            },
+        );
+    }
+
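+    /// Illustrative sketch (not from the original source) of the shape being validated here,
+    /// assuming the `---`-fenced frontmatter form used by cargo script files:
+    ///
+    /// ```text
+    /// ---cargo
+    /// [dependencies]
+    /// regex = "1"
+    /// ---
+    /// ```
+    ///
+    /// The errors emitted below cover whitespace before either fence, an invalid infostring
+    /// (`cargo` above is the infostring), a missing or length-mismatched closing fence, and
+    /// stray characters after the close.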
+    fn validate_frontmatter(
+        &self,
+        start: BytePos,
+        has_invalid_preceding_whitespace: bool,
+        invalid_infostring: bool,
+    ) {
+        let s = self.str_from(start);
+        let real_start = s.find("---").unwrap();
+        let frontmatter_opening_pos = BytePos(real_start as u32) + start;
+        let s_new = &s[real_start..];
+        let within = s_new.trim_start_matches('-');
+        let len_opening = s_new.len() - within.len();
+
+        let frontmatter_opening_end_pos = frontmatter_opening_pos + BytePos(len_opening as u32);
+        if has_invalid_preceding_whitespace {
+            let line_start =
+                BytePos(s[..real_start].rfind("\n").map_or(0, |i| i as u32 + 1)) + start;
+            let span = self.mk_sp(line_start, frontmatter_opening_end_pos);
+            let label_span = self.mk_sp(line_start, frontmatter_opening_pos);
+            self.dcx().emit_err(errors::FrontmatterInvalidOpeningPrecedingWhitespace {
+                span,
+                note_span: label_span,
+            });
+        }
+
+        if invalid_infostring {
+            let line_end = s[real_start..].find('\n').unwrap_or(s[real_start..].len());
+            let span = self.mk_sp(
+                frontmatter_opening_end_pos,
+                frontmatter_opening_pos + BytePos(line_end as u32),
+            );
+            self.dcx().emit_err(errors::FrontmatterInvalidInfostring { span });
+        }
+
+        let last_line_start = within.rfind('\n').map_or(0, |i| i + 1);
+        let last_line = &within[last_line_start..];
+        let last_line_trimmed = last_line.trim_start_matches(is_whitespace);
+        let last_line_start_pos = frontmatter_opening_end_pos + BytePos(last_line_start as u32);
+
+        let frontmatter_span = self.mk_sp(frontmatter_opening_pos, self.pos);
+        self.psess.gated_spans.gate(sym::frontmatter, frontmatter_span);
+
+        if !last_line_trimmed.starts_with("---") {
+            let label_span = self.mk_sp(frontmatter_opening_pos, frontmatter_opening_end_pos);
+            self.dcx().emit_err(errors::FrontmatterUnclosed {
+                span: frontmatter_span,
+                note_span: label_span,
+            });
+            return;
+        }
+
+        if last_line_trimmed.len() != last_line.len() {
+            let line_end = last_line_start_pos + BytePos(last_line.len() as u32);
+            let span = self.mk_sp(last_line_start_pos, line_end);
+            let whitespace_end =
+                last_line_start_pos + BytePos((last_line.len() - last_line_trimmed.len()) as u32);
+            let label_span = self.mk_sp(last_line_start_pos, whitespace_end);
+            self.dcx().emit_err(errors::FrontmatterInvalidClosingPrecedingWhitespace {
+                span,
+                note_span: label_span,
+            });
+        }
+
+        let rest = last_line_trimmed.trim_start_matches('-');
+        let len_close = last_line_trimmed.len() - rest.len();
+        if len_close != len_opening {
+            let span = self.mk_sp(frontmatter_opening_pos, self.pos);
+            let opening = self.mk_sp(frontmatter_opening_pos, frontmatter_opening_end_pos);
+            let last_line_close_pos = last_line_start_pos + BytePos(len_close as u32);
+            let close = self.mk_sp(last_line_start_pos, last_line_close_pos);
+            self.dcx().emit_err(errors::FrontmatterLengthMismatch {
+                span,
+                opening,
+                close,
+                len_opening,
+                len_close,
+            });
+        }
+
+        if !rest.trim_matches(is_whitespace).is_empty() {
+            let span = self.mk_sp(last_line_start_pos, self.pos);
+            self.dcx().emit_err(errors::FrontmatterExtraCharactersAfterClose { span });
+        }
+    }
+
+    fn cook_doc_comment(
+        &self,
+        content_start: BytePos,
+        content: &str,
+        comment_kind: CommentKind,
+        doc_style: DocStyle,
+    ) -> TokenKind {
+        if content.contains('\r') {
+            for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') {
+                let span = self.mk_sp(
+                    content_start + BytePos(idx as u32),
+                    content_start + BytePos(idx as u32 + 1),
+                );
+                let block = matches!(comment_kind, CommentKind::Block);
+                self.dcx().emit_err(errors::CrDocComment { span, block });
+            }
+        }
+
+        let attr_style = match doc_style {
+            DocStyle::Outer => AttrStyle::Outer,
+            DocStyle::Inner => AttrStyle::Inner,
+        };
+
+        token::DocComment(comment_kind, attr_style, Symbol::intern(content))
+    }
+
+    fn cook_lexer_literal(
+        &self,
+        start: BytePos,
+        end: BytePos,
+        kind: rustc_lexer::LiteralKind,
+    ) -> (token::LitKind, Symbol) {
+        match kind {
+            rustc_lexer::LiteralKind::Char { terminated } => {
+                if !terminated {
+                    let mut err = self
+                        .dcx()
+                        .struct_span_fatal(self.mk_sp(start, end), "unterminated character literal")
+                        .with_code(E0762);
+                    if let Some(lt_sp) = self.last_lifetime {
+                        err.multipart_suggestion(
+                            "if you meant to write a string literal, use double quotes",
+                            vec![
+                                (lt_sp, "\"".to_string()),
+                                (self.mk_sp(start, start + BytePos(1)), "\"".to_string()),
+                            ],
+                            Applicability::MaybeIncorrect,
+                        );
+                    }
+                    err.emit()
+                }
+                self.cook_quoted(token::Char, Mode::Char, start, end, 1, 1) // ' '
+            }
+            rustc_lexer::LiteralKind::Byte { terminated } => {
+                if !terminated {
+                    self.dcx()
+                        .struct_span_fatal(
+                            self.mk_sp(start + BytePos(1), end),
+                            "unterminated byte constant",
+                        )
+                        .with_code(E0763)
+                        .emit()
+                }
+                self.cook_quoted(token::Byte, Mode::Byte, start, end, 2, 1) // b' '
+            }
+            rustc_lexer::LiteralKind::Str { terminated } => {
+                if !terminated {
+                    self.dcx()
+                        .struct_span_fatal(
+                            self.mk_sp(start, end),
+                            "unterminated double quote string",
+                        )
+                        .with_code(E0765)
+                        .emit()
+                }
+                self.cook_quoted(token::Str, Mode::Str, start, end, 1, 1) // " "
+            }
+            rustc_lexer::LiteralKind::ByteStr { terminated } => {
+                if !terminated {
+                    self.dcx()
+                        .struct_span_fatal(
+                            self.mk_sp(start + BytePos(1), end),
+                            "unterminated double quote byte string",
+                        )
+                        .with_code(E0766)
+                        .emit()
+                }
+                self.cook_quoted(token::ByteStr, Mode::ByteStr, start, end, 2, 1)
+                // b" "
+            }
+            rustc_lexer::LiteralKind::CStr { terminated } => {
+                if !terminated {
+                    self.dcx()
+                        .struct_span_fatal(
+                            self.mk_sp(start + BytePos(1), end),
+                            "unterminated C string",
+                        )
+                        .with_code(E0767)
+                        .emit()
+                }
+                self.cook_quoted(token::CStr, Mode::CStr, start, end, 2, 1) // c" "
+            }
+            rustc_lexer::LiteralKind::RawStr { n_hashes } => {
+                if let Some(n_hashes) = n_hashes {
+                    let n = u32::from(n_hashes);
+                    let kind = token::StrRaw(n_hashes);
+                    self.cook_quoted(kind, Mode::RawStr, start, end, 2 + n, 1 + n)
+                // r##" "##
+                } else {
+                    self.report_raw_str_error(start, 1);
+                }
+            }
+            rustc_lexer::LiteralKind::RawByteStr { n_hashes } => {
+                if let Some(n_hashes) = n_hashes {
+                    let n = u32::from(n_hashes);
+                    let kind = token::ByteStrRaw(n_hashes);
+                    self.cook_quoted(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n)
+                // br##" "##
+                } else {
+                    self.report_raw_str_error(start, 2);
+                }
+            }
+            rustc_lexer::LiteralKind::RawCStr { n_hashes } => {
+                if let Some(n_hashes) = n_hashes {
+                    let n = u32::from(n_hashes);
+                    let kind = token::CStrRaw(n_hashes);
+                    self.cook_quoted(kind, Mode::RawCStr, start, end, 3 + n, 1 + n)
+                // cr##" "##
+                } else {
+                    self.report_raw_str_error(start, 2);
+                }
+            }
+            rustc_lexer::LiteralKind::Int { base, empty_int } => {
+                let mut kind = token::Integer;
+                if empty_int {
+                    let span = self.mk_sp(start, end);
+                    let guar = self.dcx().emit_err(errors::NoDigitsLiteral { span });
+                    kind = token::Err(guar);
+                } else if matches!(base, Base::Binary | Base::Octal) {
+                    let base = base as u32;
+                    let s = self.str_from_to(start + BytePos(2), end);
+                    for (idx, c) in s.char_indices() {
+                        let span = self.mk_sp(
+                            start + BytePos::from_usize(2 + idx),
+                            start + BytePos::from_usize(2 + idx + c.len_utf8()),
+                        );
+                        if c != '_' && c.to_digit(base).is_none() {
+                            let guar =
+                                self.dcx().emit_err(errors::InvalidDigitLiteral { span, base });
+                            kind = token::Err(guar);
+                        }
+                    }
+                }
+                (kind, self.symbol_from_to(start, end))
+            }
+            rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
+                let mut kind = token::Float;
+                if empty_exponent {
+                    let span = self.mk_sp(start, self.pos);
+                    let guar = self.dcx().emit_err(errors::EmptyExponentFloat { span });
+                    kind = token::Err(guar);
+                }
+                let base = match base {
+                    Base::Hexadecimal => Some("hexadecimal"),
+                    Base::Octal => Some("octal"),
+                    Base::Binary => Some("binary"),
+                    _ => None,
+                };
+                if let Some(base) = base {
+                    let span = self.mk_sp(start, end);
+                    let guar =
+                        self.dcx().emit_err(errors::FloatLiteralUnsupportedBase { span, base });
+                    kind = token::Err(guar)
+                }
+                (kind, self.symbol_from_to(start, end))
+            }
+        }
+    }
+
+    #[inline]
+    fn src_index(&self, pos: BytePos) -> usize {
+        (pos - self.start_pos).to_usize()
+    }
+
+    /// Slice of the source text from `start` up to but excluding `self.pos`,
+    /// meaning the slice does not include the character at `self.pos`.
+    fn str_from(&self, start: BytePos) -> &'src str {
+        self.str_from_to(start, self.pos)
+    }
+
+    /// Interns the slice of the source text spanning from `start` up to but excluding `end`.
+    fn symbol_from_to(&self, start: BytePos, end: BytePos) -> Symbol {
+        debug!("taking an ident from {:?} to {:?}", start, end);
+        Symbol::intern(self.str_from_to(start, end))
+    }
+
+    /// Slice of the source text spanning from `start` up to but excluding `end`.
+    fn str_from_to(&self, start: BytePos, end: BytePos) -> &'src str {
+        &self.src[self.src_index(start)..self.src_index(end)]
+    }
+
+    /// Slice of the source text spanning from `start` to the end of the source.
+    fn str_from_to_end(&self, start: BytePos) -> &'src str {
+        &self.src[self.src_index(start)..]
+    }
+
+    fn report_raw_str_error(&self, start: BytePos, prefix_len: u32) -> ! {
+        match rustc_lexer::validate_raw_str(self.str_from(start), prefix_len) {
+            Err(RawStrError::InvalidStarter { bad_char }) => {
+                self.report_non_started_raw_string(start, bad_char)
+            }
+            Err(RawStrError::NoTerminator { expected, found, possible_terminator_offset }) => self
+                .report_unterminated_raw_string(start, expected, possible_terminator_offset, found),
+            Err(RawStrError::TooManyDelimiters { found }) => {
+                self.report_too_many_hashes(start, found)
+            }
+            Ok(()) => panic!("no error found for supposedly invalid raw string literal"),
+        }
+    }
+
+    fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! {
+        self.dcx()
+            .struct_span_fatal(
+                self.mk_sp(start, self.pos),
+                format!(
+                    "found invalid character; only `#` is allowed in raw string delimitation: {}",
+                    escaped_char(bad_char)
+                ),
+            )
+            .emit()
+    }
+
+    fn report_unterminated_raw_string(
+        &self,
+        start: BytePos,
+        n_hashes: u32,
+        possible_offset: Option<u32>,
+        found_terminators: u32,
+    ) -> ! {
+        let mut err =
+            self.dcx().struct_span_fatal(self.mk_sp(start, start), "unterminated raw string");
+        err.code(E0748);
+        err.span_label(self.mk_sp(start, start), "unterminated raw string");
+
+        if n_hashes > 0 {
+            err.note(format!(
+                "this raw string should be terminated with `\"{}`",
+                "#".repeat(n_hashes as usize)
+            ));
+        }
+
+        if let Some(possible_offset) = possible_offset {
+            let lo = start + BytePos(possible_offset);
+            let hi = lo + BytePos(found_terminators);
+            let span = self.mk_sp(lo, hi);
+            err.span_suggestion(
+                span,
+                "consider terminating the string here",
+                "#".repeat(n_hashes as usize),
+                Applicability::MaybeIncorrect,
+            );
+        }
+
+        err.emit()
+    }
+
+    fn report_unterminated_block_comment(&self, start: BytePos, doc_style: Option<DocStyle>) {
+        let msg = match doc_style {
+            Some(_) => "unterminated block doc-comment",
+            None => "unterminated block comment",
+        };
+        let last_bpos = self.pos;
+        let mut err = self.dcx().struct_span_fatal(self.mk_sp(start, last_bpos), msg);
+        err.code(E0758);
+        let mut nested_block_comment_open_idxs = vec![];
+        let mut last_nested_block_comment_idxs = None;
+        let mut content_chars = self.str_from(start).char_indices().peekable();
+
+        while let Some((idx, current_char)) = content_chars.next() {
+            match content_chars.peek() {
+                Some((_, '*')) if current_char == '/' => {
+                    nested_block_comment_open_idxs.push(idx);
+                }
+                Some((_, '/')) if current_char == '*' => {
+                    last_nested_block_comment_idxs =
+                        nested_block_comment_open_idxs.pop().map(|open_idx| (open_idx, idx));
+                }
+                _ => {}
+            };
+        }
+
+        if let Some((nested_open_idx, nested_close_idx)) = last_nested_block_comment_idxs {
+            err.span_label(self.mk_sp(start, start + BytePos(2)), msg)
+                .span_label(
+                    self.mk_sp(
+                        start + BytePos(nested_open_idx as u32),
+                        start + BytePos(nested_open_idx as u32 + 2),
+                    ),
+                    "...as last nested comment starts here, maybe you want to close this instead?",
+                )
+                .span_label(
+                    self.mk_sp(
+                        start + BytePos(nested_close_idx as u32),
+                        start + BytePos(nested_close_idx as u32 + 2),
+                    ),
+                    "...and last nested comment terminates here.",
+                );
+        }
+
+        err.emit();
+    }
+
+    // RFC 3101 introduced the idea of (reserved) prefixes. As of Rust 2021,
+    // using an (unknown) prefix is an error. In earlier editions, however, such
+    // prefixes only result in an (allowed-by-default) lint and are treated as
+    // regular identifier tokens.
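+    //
+    // Illustrative example (not from the original source): given an unknown prefix such as
+    //
+    //     let s = prefix"hello";
+    //
+    // edition 2021 and later reject `prefix` with a hard error, while earlier editions lex it
+    // as the identifier `prefix` followed by the string literal `"hello"` and only buffer a
+    // migration lint.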
+    fn report_unknown_prefix(&self, start: BytePos) {
+        let prefix_span = self.mk_sp(start, self.pos);
+        let prefix = self.str_from_to(start, self.pos);
+        let expn_data = prefix_span.ctxt().outer_expn_data();
+
+        if expn_data.edition.at_least_rust_2021() {
+            // In Rust 2021, this is a hard error.
+            let sugg = if prefix == "rb" {
+                Some(errors::UnknownPrefixSugg::UseBr(prefix_span))
+            } else if prefix == "rc" {
+                Some(errors::UnknownPrefixSugg::UseCr(prefix_span))
+            } else if expn_data.is_root() {
+                if self.cursor.first() == '\''
+                    && let Some(start) = self.last_lifetime
+                    && self.cursor.third() != '\''
+                    && let end = self.mk_sp(self.pos, self.pos + BytePos(1))
+                    && !self.psess.source_map().is_multiline(start.until(end))
+                {
+                    // FIXME: An "unclosed `char`" error will already be emitted in some cases,
+                    // but it's hard to silence this error without also silencing important
+                    // cases. We should use the error stashing machinery instead.
+                    Some(errors::UnknownPrefixSugg::MeantStr { start, end })
+                } else {
+                    Some(errors::UnknownPrefixSugg::Whitespace(prefix_span.shrink_to_hi()))
+                }
+            } else {
+                None
+            };
+            self.dcx().emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
+        } else {
+            // Before Rust 2021, only emit a lint for migration.
+            self.psess.buffer_lint(
+                RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
+                prefix_span,
+                ast::CRATE_NODE_ID,
+                BuiltinLintDiag::ReservedPrefix(prefix_span, prefix.to_string()),
+            );
+        }
+    }
+
+    /// Detect guarded string literal syntax.
+    ///
+    /// RFC 3593 reserved this syntax for future use. As of Rust 2024,
+    /// using this syntax produces an error. In earlier editions, however, it
+    /// only results in an (allowed-by-default) lint, and the input is lexed as
+    /// separate tokens.
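+    ///
+    /// Illustrative example (not from the original source): a guarded string such as
+    /// `#"foo"#` is a hard error from edition 2024 onward, while in earlier editions it is
+    /// lexed as `#`, the string `"foo"`, and a trailing `#`, with only a migration lint
+    /// buffered (the rollback at the end of this function preserves that older tokenization).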
+    fn maybe_report_guarded_str(&mut self, start: BytePos, str_before: &'src str) -> TokenKind {
+        let span = self.mk_sp(start, self.pos);
+        let edition2024 = span.edition().at_least_rust_2024();
+
+        let space_pos = start + BytePos(1);
+        let space_span = self.mk_sp(space_pos, space_pos);
+
+        let mut cursor = Cursor::new(str_before, FrontmatterAllowed::No);
+
+        let (is_string, span, unterminated) = match cursor.guarded_double_quoted_string() {
+            Some(rustc_lexer::GuardedStr { n_hashes, terminated, token_len }) => {
+                let end = start + BytePos(token_len);
+                let span = self.mk_sp(start, end);
+                let str_start = start + BytePos(n_hashes);
+
+                if edition2024 {
+                    self.cursor = cursor;
+                    self.pos = end;
+                }
+
+                let unterminated = if terminated { None } else { Some(str_start) };
+
+                (true, span, unterminated)
+            }
+            None => {
+                // We should only get here in the `##+` case.
+                debug_assert_eq!(self.str_from_to(start, start + BytePos(2)), "##");
+
+                (false, span, None)
+            }
+        };
+        if edition2024 {
+            if let Some(str_start) = unterminated {
+                // This is only a fatal error if the string is unterminated.
+                self.dcx()
+                    .struct_span_fatal(
+                        self.mk_sp(str_start, self.pos),
+                        "unterminated double quote string",
+                    )
+                    .with_code(E0765)
+                    .emit()
+            }
+
+            let sugg = if span.from_expansion() {
+                None
+            } else {
+                Some(errors::GuardedStringSugg(space_span))
+            };
+
+            // In Edition 2024 and later, emit a hard error.
+            let err = if is_string {
+                self.dcx().emit_err(errors::ReservedString { span, sugg })
+            } else {
+                self.dcx().emit_err(errors::ReservedMultihash { span, sugg })
+            };
+
+            token::Literal(token::Lit {
+                kind: token::Err(err),
+                symbol: self.symbol_from_to(start, self.pos),
+                suffix: None,
+            })
+        } else {
+            // Before Rust 2024, only emit a lint for migration.
+            self.psess.buffer_lint(
+                RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
+                span,
+                ast::CRATE_NODE_ID,
+                BuiltinLintDiag::ReservedString { is_string, suggestion: space_span },
+            );
+
+            // For backwards compatibility, roll back to after just the first `#`
+            // and return the `Pound` token.
+            self.pos = start + BytePos(1);
+            self.cursor = Cursor::new(&str_before[1..], FrontmatterAllowed::No);
+            token::Pound
+        }
+    }
+
+    fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! {
+        self.dcx().emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
+    }
+
+    fn cook_quoted(
+        &self,
+        mut kind: token::LitKind,
+        mode: Mode,
+        start: BytePos,
+        end: BytePos,
+        prefix_len: u32,
+        postfix_len: u32,
+    ) -> (token::LitKind, Symbol) {
+        let content_start = start + BytePos(prefix_len);
+        let content_end = end - BytePos(postfix_len);
+        let lit_content = self.str_from_to(content_start, content_end);
+        check_for_errors(lit_content, mode, |range, err| {
+            let span_with_quotes = self.mk_sp(start, end);
+            let (start, end) = (range.start as u32, range.end as u32);
+            let lo = content_start + BytePos(start);
+            let hi = lo + BytePos(end - start);
+            let span = self.mk_sp(lo, hi);
+            let is_fatal = err.is_fatal();
+            if let Some(guar) = emit_unescape_error(
+                self.dcx(),
+                lit_content,
+                span_with_quotes,
+                span,
+                mode,
+                range,
+                err,
+            ) {
+                assert!(is_fatal);
+                kind = token::Err(guar);
+            }
+        });
+
+        // We normally exclude the quotes from the symbol, but for errors we
+        // include them because that results in clearer error messages.
+        let sym = if !matches!(kind, token::Err(_)) {
+            Symbol::intern(lit_content)
+        } else {
+            self.symbol_from_to(start, end)
+        };
+        (kind, sym)
+    }
+}
+
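+/// Interns `string`, applying NFC normalization first unless the quick check can prove the
+/// input is already in NFC form.
+///
+/// Illustrative example (not from the original source; `ignore`d because interning needs a
+/// compiler session):
+///
+/// ```ignore (requires a rustc interner session)
+/// // A decomposed "e" followed by U+0301 COMBINING ACUTE ACCENT normalizes to the single
+/// // precomposed character U+00E9 ("é").
+/// assert_eq!(nfc_normalize("e\u{0301}").as_str(), "\u{e9}");
+/// ```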
+pub fn nfc_normalize(string: &str) -> Symbol {
+    use unicode_normalization::{IsNormalized, UnicodeNormalization, is_nfc_quick};
+    match is_nfc_quick(string.chars()) {
+        IsNormalized::Yes => Symbol::intern(string),
+        _ => {
+            let normalized_str: String = string.chars().nfc().collect();
+            Symbol::intern(&normalized_str)
+        }
+    }
+}
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
new file mode 100644
index 00000000000..634f4c30b26
--- /dev/null
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -0,0 +1,258 @@
+use rustc_ast::token::{self, Delimiter, Token};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast_pretty::pprust::token_to_string;
+use rustc_errors::Diag;
+
+use super::diagnostics::{
+    report_missing_open_delim, report_suspicious_mismatch_block, same_indentation_level,
+};
+use super::{Lexer, UnmatchedDelim};
+
+impl<'psess, 'src> Lexer<'psess, 'src> {
+    // Lex into a token stream. The `Spacing` in the result is that of the
+    // opening delimiter.
+    pub(super) fn lex_token_trees(
+        &mut self,
+        is_delimited: bool,
+    ) -> Result<(Spacing, TokenStream), Vec<Diag<'psess>>> {
+        // Move past the opening delimiter.
+        let open_spacing = self.bump_minimal();
+
+        let mut buf = Vec::new();
+        loop {
+            if let Some(delim) = self.token.kind.open_delim() {
+                // Invisible delimiters cannot occur here because `TokenTreesReader` parses
+                // code directly from strings, with no macro expansion involved.
+                debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
+                buf.push(match self.lex_token_tree_open_delim(delim) {
+                    Ok(val) => val,
+                    Err(errs) => return Err(errs),
+                })
+            } else if let Some(delim) = self.token.kind.close_delim() {
+                // Invisible delimiters cannot occur here because `TokenTreesReader` parses
+                // code directly from strings, with no macro expansion involved.
+                debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
+                return if is_delimited {
+                    Ok((open_spacing, TokenStream::new(buf)))
+                } else {
+                    Err(vec![self.close_delim_err(delim)])
+                };
+            } else if self.token.kind == token::Eof {
+                return if is_delimited {
+                    Err(vec![self.eof_err()])
+                } else {
+                    Ok((open_spacing, TokenStream::new(buf)))
+                };
+            } else {
+                // Get the next normal token.
+                let (this_tok, this_spacing) = self.bump();
+                buf.push(TokenTree::Token(this_tok, this_spacing));
+            }
+        }
+    }
+
+    fn lex_token_tree_open_delim(
+        &mut self,
+        open_delim: Delimiter,
+    ) -> Result<TokenTree, Vec<Diag<'psess>>> {
+        // The span for beginning of the delimited section.
+        let pre_span = self.token.span;
+
+        self.diag_info.open_delimiters.push((open_delim, self.token.span));
+
+        // Lex the token trees within the delimiters.
+        // We stop at any delimiter so we can try to recover if the user
+        // uses an incorrect delimiter.
+        let (open_spacing, tts) = self.lex_token_trees(/* is_delimited */ true)?;
+
+        // Expand to cover the entire delimited token tree.
+        let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
+        let sm = self.psess.source_map();
+
+        let close_spacing = if let Some(close_delim) = self.token.kind.close_delim() {
+            if close_delim == open_delim {
+                // Correct delimiter.
+                let (open_delimiter, open_delimiter_span) =
+                    self.diag_info.open_delimiters.pop().unwrap();
+                let close_delimiter_span = self.token.span;
+
+                if tts.is_empty() && close_delim == Delimiter::Brace {
+                    let empty_block_span = open_delimiter_span.to(close_delimiter_span);
+                    if !sm.is_multiline(empty_block_span) {
+                        // Only track if the block is in the form of `{}`, otherwise it is
+                        // likely that it was written on purpose.
+                        self.diag_info.empty_block_spans.push(empty_block_span);
+                    }
+                }
+
+                // Only track matching brace pairs.
+                if let (Delimiter::Brace, Delimiter::Brace) = (open_delimiter, open_delim) {
+                    // Add all the matching spans; we will sort by span later.
+                    self.diag_info
+                        .matching_block_spans
+                        .push((open_delimiter_span, close_delimiter_span));
+                }
+
+                // Move past the closing delimiter.
+                self.bump_minimal()
+            } else {
+                // Incorrect delimiter.
+                let mut unclosed_delimiter = None;
+                let mut candidate = None;
+
+                if self.diag_info.last_unclosed_found_span != Some(self.token.span) {
+                    // Do not complain about the same unclosed delimiter multiple times.
+                    self.diag_info.last_unclosed_found_span = Some(self.token.span);
+                    // This is a conservative error: only report the last unclosed
+                    // delimiter. The previous unclosed delimiters could actually be
+                    // closed! The lexer just hasn't gotten to them yet.
+                    if let Some(&(_, sp)) = self.diag_info.open_delimiters.last() {
+                        unclosed_delimiter = Some(sp);
+                    };
+                    for (delimiter, delimiter_span) in &self.diag_info.open_delimiters {
+                        if same_indentation_level(sm, self.token.span, *delimiter_span)
+                            && delimiter == &close_delim
+                        {
+                            // High likelihood that these two correspond.
+                            candidate = Some(*delimiter_span);
+                        }
+                    }
+                    let (_, _) = self.diag_info.open_delimiters.pop().unwrap();
+                    self.diag_info.unmatched_delims.push(UnmatchedDelim {
+                        found_delim: Some(close_delim),
+                        found_span: self.token.span,
+                        unclosed_span: unclosed_delimiter,
+                        candidate_span: candidate,
+                    });
+                } else {
+                    self.diag_info.open_delimiters.pop();
+                }
+
+                // If the incorrect delimiter matches an earlier opening
+                // delimiter, then don't consume it (it can be used to
+                // close the earlier one). Otherwise, consume it.
+                // E.g., we try to recover from:
+                // fn foo() {
+                //     bar(baz(
+                // }  // Incorrect delimiter but matches the earlier `{`
+                if !self.diag_info.open_delimiters.iter().any(|&(d, _)| d == close_delim) {
+                    self.bump_minimal()
+                } else {
+                    // The choice of value here doesn't matter.
+                    Spacing::Alone
+                }
+            }
+        } else {
+            assert_eq!(self.token.kind, token::Eof);
+            // Silently recover; the EOF token will be seen again
+            // and an error emitted then. Thus we don't pop from
+            // `self.diag_info.open_delimiters` here. The choice of spacing value here
+            // doesn't matter.
+            Spacing::Alone
+        };
+
+        let spacing = DelimSpacing::new(open_spacing, close_spacing);
+
+        Ok(TokenTree::Delimited(delim_span, spacing, open_delim, tts))
+    }
+
+    // Move on to the next token, returning the current token and its spacing.
+    // Will glue adjacent single-char tokens together.
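+    // Illustrative example (not from the original source): two `=` tokens with no whitespace
+    // between them are glued into a single `==` token, `&` then `&` into `&&`, and so on;
+    // gluing stops once `Token::glue` returns `None` or whitespace intervenes.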
+    fn bump(&mut self) -> (Token, Spacing) {
+        let (this_spacing, next_tok) = loop {
+            let (next_tok, is_next_tok_preceded_by_whitespace) = self.next_token_from_cursor();
+
+            if is_next_tok_preceded_by_whitespace {
+                break (Spacing::Alone, next_tok);
+            } else if let Some(glued) = self.token.glue(&next_tok) {
+                self.token = glued;
+            } else {
+                let this_spacing = self.calculate_spacing(&next_tok);
+                break (this_spacing, next_tok);
+            }
+        };
+        let this_tok = std::mem::replace(&mut self.token, next_tok);
+        (this_tok, this_spacing)
+    }
+
+    // Cut-down version of `bump` used when the token kind is known in advance.
+    fn bump_minimal(&mut self) -> Spacing {
+        let (next_tok, is_next_tok_preceded_by_whitespace) = self.next_token_from_cursor();
+        let this_spacing = if is_next_tok_preceded_by_whitespace {
+            Spacing::Alone
+        } else {
+            self.calculate_spacing(&next_tok)
+        };
+        self.token = next_tok;
+        this_spacing
+    }
+
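+    // Illustrative note (not from the original source): the current token is `Joint` when it
+    // is immediately followed by another punctuation token (e.g. the `+` in `+=`), `Alone`
+    // when the next token is EOF (or, in `bump`, preceded by whitespace), and `JointHidden`
+    // when it is immediately followed by a non-punctuation token such as an identifier.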
+    fn calculate_spacing(&self, next_tok: &Token) -> Spacing {
+        if next_tok.is_punct() {
+            Spacing::Joint
+        } else if *next_tok == token::Eof {
+            Spacing::Alone
+        } else {
+            Spacing::JointHidden
+        }
+    }
+
+    fn eof_err(&mut self) -> Diag<'psess> {
+        const UNCLOSED_DELIMITER_SHOW_LIMIT: usize = 5;
+        let msg = "this file contains an unclosed delimiter";
+        let mut err = self.dcx().struct_span_err(self.token.span, msg);
+
+        let len = usize::min(UNCLOSED_DELIMITER_SHOW_LIMIT, self.diag_info.open_delimiters.len());
+        for &(_, span) in &self.diag_info.open_delimiters[..len] {
+            err.span_label(span, "unclosed delimiter");
+            self.diag_info.unmatched_delims.push(UnmatchedDelim {
+                found_delim: None,
+                found_span: self.token.span,
+                unclosed_span: Some(span),
+                candidate_span: None,
+            });
+        }
+
+        if let Some((_, span)) = self.diag_info.open_delimiters.get(UNCLOSED_DELIMITER_SHOW_LIMIT)
+            && self.diag_info.open_delimiters.len() >= UNCLOSED_DELIMITER_SHOW_LIMIT + 2
+        {
+            err.span_label(
+                *span,
+                format!(
+                    "another {} unclosed delimiters begin from here",
+                    self.diag_info.open_delimiters.len() - UNCLOSED_DELIMITER_SHOW_LIMIT
+                ),
+            );
+        }
+
+        if let Some((delim, _)) = self.diag_info.open_delimiters.last() {
+            report_suspicious_mismatch_block(
+                &mut err,
+                &self.diag_info,
+                self.psess.source_map(),
+                *delim,
+            )
+        }
+        err
+    }
+
+    fn close_delim_err(&mut self, delim: Delimiter) -> Diag<'psess> {
+        // An unexpected closing delimiter (i.e., there is no matching opening delimiter).
+        let token_str = token_to_string(&self.token);
+        let msg = format!("unexpected closing delimiter: `{token_str}`");
+        let mut err = self.dcx().struct_span_err(self.token.span, msg);
+
+        // If there is no missing open delimiter, report a suspicious mismatched block.
+        if !report_missing_open_delim(&mut err, &mut self.diag_info.unmatched_delims) {
+            report_suspicious_mismatch_block(
+                &mut err,
+                &self.diag_info,
+                self.psess.source_map(),
+                delim,
+            );
+        }
+
+        err.span_label(self.token.span, "unexpected closing delimiter");
+        err
+    }
+}
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
new file mode 100644
index 00000000000..ec59a1a0131
--- /dev/null
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -0,0 +1,297 @@
+//! Utilities for rendering escape sequence errors as diagnostics.
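+//!
+//! Illustrative example (not from the original source): the char literal `'ab'` produces
+//! `EscapeError::MoreThanOneChar`, which `emit_unescape_error` turns into a diagnostic that,
+//! among other things, can suggest writing the string literal `"ab"` instead.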
+
+use std::iter::once;
+use std::ops::Range;
+
+use rustc_errors::{Applicability, DiagCtxtHandle, ErrorGuaranteed};
+use rustc_literal_escaper::{EscapeError, Mode};
+use rustc_span::{BytePos, Span};
+use tracing::debug;
+
+use crate::errors::{MoreThanOneCharNote, MoreThanOneCharSugg, NoBraceUnicodeSub, UnescapeError};
+
+pub(crate) fn emit_unescape_error(
+    dcx: DiagCtxtHandle<'_>,
+    // interior part of the literal, between quotes
+    lit: &str,
+    // full span of the literal, including quotes and any prefix
+    full_lit_span: Span,
+    // span of the error part of the literal
+    err_span: Span,
+    mode: Mode,
+    // range of the error inside `lit`
+    range: Range<usize>,
+    error: EscapeError,
+) -> Option<ErrorGuaranteed> {
+    debug!(
+        "emit_unescape_error: {:?}, {:?}, {:?}, {:?}, {:?}",
+        lit, full_lit_span, mode, range, error
+    );
+    let last_char = || {
+        let c = lit[range.clone()].chars().next_back().unwrap();
+        let span = err_span.with_lo(err_span.hi() - BytePos(c.len_utf8() as u32));
+        (c, span)
+    };
+    Some(match error {
+        EscapeError::LoneSurrogateUnicodeEscape => {
+            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: true })
+        }
+        EscapeError::OutOfRangeUnicodeEscape => {
+            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: false })
+        }
+        EscapeError::MoreThanOneChar => {
+            use unicode_normalization::UnicodeNormalization;
+            use unicode_normalization::char::is_combining_mark;
+            let mut sugg = None;
+            let mut note = None;
+
+            let lit_chars = lit.chars().collect::<Vec<_>>();
+            let (first, rest) = lit_chars.split_first().unwrap();
+            if rest.iter().copied().all(is_combining_mark) {
+                let normalized = lit.nfc().to_string();
+                if normalized.chars().count() == 1 {
+                    let ch = normalized.chars().next().unwrap().escape_default().to_string();
+                    sugg = Some(MoreThanOneCharSugg::NormalizedForm {
+                        span: err_span,
+                        ch,
+                        normalized,
+                    });
+                }
+                let escaped_marks =
+                    rest.iter().map(|c| c.escape_default().to_string()).collect::<Vec<_>>();
+                note = Some(MoreThanOneCharNote::AllCombining {
+                    span: err_span,
+                    chr: format!("{first}"),
+                    len: escaped_marks.len(),
+                    escaped_marks: escaped_marks.join(""),
+                });
+            } else {
+                let printable: Vec<char> = lit
+                    .chars()
+                    .filter(|&x| {
+                        unicode_width::UnicodeWidthChar::width(x).unwrap_or(0) != 0
+                            && !x.is_whitespace()
+                    })
+                    .collect();
+
+                if let &[ch] = printable.as_slice() {
+                    sugg = Some(MoreThanOneCharSugg::RemoveNonPrinting {
+                        span: err_span,
+                        ch: ch.to_string(),
+                    });
+                    note = Some(MoreThanOneCharNote::NonPrinting {
+                        span: err_span,
+                        escaped: lit.escape_default().to_string(),
+                    });
+                }
+            };
+            let sugg = sugg.unwrap_or_else(|| {
+                let prefix = mode.prefix_noraw();
+                let mut escaped = String::with_capacity(lit.len());
+                let mut in_escape = false;
+                for c in lit.chars() {
+                    match c {
+                        '\\' => in_escape = !in_escape,
+                        '"' if !in_escape => escaped.push('\\'),
+                        _ => in_escape = false,
+                    }
+                    escaped.push(c);
+                }
+                if escaped.len() != lit.len() || full_lit_span.is_empty() {
+                    let sugg = format!("{prefix}\"{escaped}\"");
+                    MoreThanOneCharSugg::QuotesFull {
+                        span: full_lit_span,
+                        is_byte: mode == Mode::Byte,
+                        sugg,
+                    }
+                } else {
+                    MoreThanOneCharSugg::Quotes {
+                        start: full_lit_span
+                            .with_hi(full_lit_span.lo() + BytePos((prefix.len() + 1) as u32)),
+                        end: full_lit_span.with_lo(full_lit_span.hi() - BytePos(1)),
+                        is_byte: mode == Mode::Byte,
+                        prefix,
+                    }
+                }
+            });
+            dcx.emit_err(UnescapeError::MoreThanOneChar {
+                span: full_lit_span,
+                note,
+                suggestion: sugg,
+            })
+        }
+        EscapeError::EscapeOnlyChar => {
+            let (c, char_span) = last_char();
+            dcx.emit_err(UnescapeError::EscapeOnlyChar {
+                span: err_span,
+                char_span,
+                escaped_sugg: c.escape_default().to_string(),
+                escaped_msg: escaped_char(c),
+                byte: mode == Mode::Byte,
+            })
+        }
+        EscapeError::BareCarriageReturn => {
+            let double_quotes = mode.in_double_quotes();
+            dcx.emit_err(UnescapeError::BareCr { span: err_span, double_quotes })
+        }
+        EscapeError::BareCarriageReturnInRawString => {
+            assert!(mode.in_double_quotes());
+            dcx.emit_err(UnescapeError::BareCrRawString(err_span))
+        }
+        EscapeError::InvalidEscape => {
+            let (c, span) = last_char();
+
+            let label = if mode == Mode::Byte || mode == Mode::ByteStr {
+                "unknown byte escape"
+            } else {
+                "unknown character escape"
+            };
+            let ec = escaped_char(c);
+            let mut diag = dcx.struct_span_err(span, format!("{label}: `{ec}`"));
+            diag.span_label(span, label);
+            if c == '{' || c == '}' && matches!(mode, Mode::Str | Mode::RawStr) {
+                diag.help(
+                    "if used in a formatting string, curly braces are escaped with `{{` and `}}`",
+                );
+            } else if c == '\r' {
+                diag.help(
+                    "this is an isolated carriage return; consider checking your editor and \
+                     version control settings",
+                );
+            } else {
+                if mode == Mode::Str || mode == Mode::Char {
+                    diag.span_suggestion(
+                        full_lit_span,
+                        "if you meant to write a literal backslash (perhaps escaping in a regular expression), consider a raw string literal",
+                        format!("r\"{lit}\""),
+                        Applicability::MaybeIncorrect,
+                    );
+                }
+
+                diag.help(
+                    "for more information, visit \
+                     <https://doc.rust-lang.org/reference/tokens.html#literals>",
+                );
+            }
+            diag.emit()
+        }
+        EscapeError::TooShortHexEscape => dcx.emit_err(UnescapeError::TooShortHexEscape(err_span)),
+        EscapeError::InvalidCharInHexEscape | EscapeError::InvalidCharInUnicodeEscape => {
+            let (c, span) = last_char();
+            let is_hex = error == EscapeError::InvalidCharInHexEscape;
+            let ch = escaped_char(c);
+            dcx.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch })
+        }
+        EscapeError::NonAsciiCharInByte => {
+            let (c, span) = last_char();
+            let desc = match mode {
+                Mode::Byte => "byte literal",
+                Mode::ByteStr => "byte string literal",
+                Mode::RawByteStr => "raw byte string literal",
+                _ => panic!("non-is_byte literal paired with NonAsciiCharInByte"),
+            };
+            let mut err = dcx.struct_span_err(span, format!("non-ASCII character in {desc}"));
+            let postfix = if unicode_width::UnicodeWidthChar::width(c).unwrap_or(1) == 0 {
+                format!(" but is {c:?}")
+            } else {
+                String::new()
+            };
+            err.span_label(span, format!("must be ASCII{postfix}"));
+            // Note: the \\xHH suggestions are not given for raw byte string
+            // literals, because they are raw and so cannot use any escapes.
+            if (c as u32) <= 0xFF && mode != Mode::RawByteStr {
+                err.span_suggestion(
+                    span,
+                    format!(
+                        "if you meant to use the unicode code point for {c:?}, use a \\xHH escape"
+                    ),
+                    format!("\\x{:X}", c as u32),
+                    Applicability::MaybeIncorrect,
+                );
+            } else if mode == Mode::Byte {
+                err.span_label(span, "this multibyte character does not fit into a single byte");
+            } else if mode != Mode::RawByteStr {
+                let mut utf8 = String::new();
+                utf8.push(c);
+                err.span_suggestion(
+                    span,
+                    format!("if you meant to use the UTF-8 encoding of {c:?}, use \\xHH escapes"),
+                    utf8.as_bytes()
+                        .iter()
+                        .map(|b: &u8| format!("\\x{:X}", *b))
+                        .fold("".to_string(), |a, c| a + &c),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            err.emit()
+        }
+        EscapeError::OutOfRangeHexEscape => {
+            dcx.emit_err(UnescapeError::OutOfRangeHexEscape(err_span))
+        }
+        EscapeError::LeadingUnderscoreUnicodeEscape => {
+            let (c, span) = last_char();
+            dcx.emit_err(UnescapeError::LeadingUnderscoreUnicodeEscape {
+                span,
+                ch: escaped_char(c),
+            })
+        }
+        EscapeError::OverlongUnicodeEscape => {
+            dcx.emit_err(UnescapeError::OverlongUnicodeEscape(err_span))
+        }
+        EscapeError::UnclosedUnicodeEscape => {
+            dcx.emit_err(UnescapeError::UnclosedUnicodeEscape(err_span, err_span.shrink_to_hi()))
+        }
+        EscapeError::NoBraceInUnicodeEscape => {
+            let mut suggestion = "\\u{".to_owned();
+            let mut suggestion_len = 0;
+            let (c, char_span) = last_char();
+            let chars = once(c).chain(lit[range.end..].chars());
+            for c in chars.take(6).take_while(|c| c.is_digit(16)) {
+                suggestion.push(c);
+                suggestion_len += c.len_utf8();
+            }
+
+            let (label, sub) = if suggestion_len > 0 {
+                suggestion.push('}');
+                let hi = char_span.lo() + BytePos(suggestion_len as u32);
+                (None, NoBraceUnicodeSub::Suggestion { span: err_span.with_hi(hi), suggestion })
+            } else {
+                (Some(err_span), NoBraceUnicodeSub::Help)
+            };
+            dcx.emit_err(UnescapeError::NoBraceInUnicodeEscape { span: err_span, label, sub })
+        }
+        EscapeError::UnicodeEscapeInByte => {
+            dcx.emit_err(UnescapeError::UnicodeEscapeInByte(err_span))
+        }
+        EscapeError::EmptyUnicodeEscape => {
+            dcx.emit_err(UnescapeError::EmptyUnicodeEscape(err_span))
+        }
+        EscapeError::ZeroChars => dcx.emit_err(UnescapeError::ZeroChars(err_span)),
+        EscapeError::LoneSlash => dcx.emit_err(UnescapeError::LoneSlash(err_span)),
+        EscapeError::NulInCStr => dcx.emit_err(UnescapeError::NulInCStr { span: err_span }),
+        EscapeError::UnskippedWhitespaceWarning => {
+            let (c, char_span) = last_char();
+            dcx.emit_warn(UnescapeError::UnskippedWhitespace {
+                span: err_span,
+                ch: escaped_char(c),
+                char_span,
+            });
+            return None;
+        }
+        EscapeError::MultipleSkippedLinesWarning => {
+            dcx.emit_warn(UnescapeError::MultipleSkippedLinesWarning(err_span));
+            return None;
+        }
+    })
+}
+
+/// Renders a character for use in an error message, escaping it unless it is printable ASCII.
+pub(crate) fn escaped_char(c: char) -> String {
+    match c {
+        '\u{20}'..='\u{7e}' => {
+            // Don't escape \, ' or " for user-facing messages
+            c.to_string()
+        }
+        _ => c.escape_default().to_string(),
+    }
+}
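+
+// A small illustrative sketch of `escaped_char`: printable ASCII comes back verbatim,
+// anything else is rendered through `char::escape_default`.
+#[cfg(test)]
+mod escaped_char_sketch {
+    use super::escaped_char;
+
+    #[test]
+    fn renders_characters_for_messages() {
+        // Printable ASCII (including `\`, `'` and `"`) is left as-is.
+        assert_eq!(escaped_char('a'), "a");
+        assert_eq!(escaped_char('"'), "\"");
+        // Control characters are escaped.
+        assert_eq!(escaped_char('\n'), "\\n");
+    }
+}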
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
new file mode 100644
index 00000000000..751d13af433
--- /dev/null
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -0,0 +1,391 @@
+//! Characters and their corresponding confusables were collected from
+//! <https://www.unicode.org/Public/security/10.0.0/confusables.txt>
+
+use rustc_span::{BytePos, Pos, Span, kw};
+
+use super::Lexer;
+use crate::errors::TokenSubstitution;
+use crate::token;
+
+#[rustfmt::skip] // for line breaks
+pub(super) static UNICODE_ARRAY: &[(char, &str, &str)] = &[
+    ('\u{2028}', "Line Separator", " "),
+    ('\u{2029}', "Paragraph Separator", " "),
+    (' ', "Ogham Space mark", " "),
+    (' ', "En Quad", " "),
+    (' ', "Em Quad", " "),
+    (' ', "En Space", " "),
+    (' ', "Em Space", " "),
+    (' ', "Three-Per-Em Space", " "),
+    (' ', "Four-Per-Em Space", " "),
+    (' ', "Six-Per-Em Space", " "),
+    (' ', "Punctuation Space", " "),
+    (' ', "Thin Space", " "),
+    (' ', "Hair Space", " "),
+    (' ', "Medium Mathematical Space", " "),
+    (' ', "No-Break Space", " "),
+    (' ', "Figure Space", " "),
+    (' ', "Narrow No-Break Space", " "),
+    (' ', "Ideographic Space", " "),
+
+    ('ߺ', "Nko Lajanyalan", "_"),
+    ('﹍', "Dashed Low Line", "_"),
+    ('﹎', "Centreline Low Line", "_"),
+    ('﹏', "Wavy Low Line", "_"),
+    ('_', "Fullwidth Low Line", "_"),
+
+    ('‐', "Hyphen", "-"),
+    ('‑', "Non-Breaking Hyphen", "-"),
+    ('‒', "Figure Dash", "-"),
+    ('–', "En Dash", "-"),
+    ('—', "Em Dash", "-"),
+    ('﹘', "Small Em Dash", "-"),
+    ('۔', "Arabic Full Stop", "-"),
+    ('⁃', "Hyphen Bullet", "-"),
+    ('˗', "Modifier Letter Minus Sign", "-"),
+    ('−', "Minus Sign", "-"),
+    ('➖', "Heavy Minus Sign", "-"),
+    ('Ⲻ', "Coptic Letter Dialect-P Ni", "-"),
+    ('ー', "Katakana-Hiragana Prolonged Sound Mark", "-"),
+    ('-', "Fullwidth Hyphen-Minus", "-"),
+    ('―', "Horizontal Bar", "-"),
+    ('─', "Box Drawings Light Horizontal", "-"),
+    ('━', "Box Drawings Heavy Horizontal", "-"),
+    ('㇐', "CJK Stroke H", "-"),
+    ('ꟷ', "Latin Epigraphic Letter Sideways I", "-"),
+    ('ᅳ', "Hangul Jungseong Eu", "-"),
+    ('ㅡ', "Hangul Letter Eu", "-"),
+    ('一', "CJK Unified Ideograph-4E00", "-"),
+    ('⼀', "Kangxi Radical One", "-"),
+
+    ('؍', "Arabic Date Separator", ","),
+    ('٫', "Arabic Decimal Separator", ","),
+    ('‚', "Single Low-9 Quotation Mark", ","),
+    ('¸', "Cedilla", ","),
+    ('ꓹ', "Lisu Letter Tone Na Po", ","),
+    (',', "Fullwidth Comma", ","),
+
+    (';', "Greek Question Mark", ";"),
+    (';', "Fullwidth Semicolon", ";"),
+    ('︔', "Presentation Form For Vertical Semicolon", ";"),
+
+    ('ः', "Devanagari Sign Visarga", ":"),
+    ('ઃ', "Gujarati Sign Visarga", ":"),
+    (':', "Fullwidth Colon", ":"),
+    ('։', "Armenian Full Stop", ":"),
+    ('܃', "Syriac Supralinear Colon", ":"),
+    ('܄', "Syriac Sublinear Colon", ":"),
+    ('᛬', "Runic Multiple Punctuation", ":"),
+    ('︰', "Presentation Form For Vertical Two Dot Leader", ":"),
+    ('᠃', "Mongolian Full Stop", ":"),
+    ('᠉', "Mongolian Manchu Full Stop", ":"),
+    ('⁚', "Two Dot Punctuation", ":"),
+    ('׃', "Hebrew Punctuation Sof Pasuq", ":"),
+    ('˸', "Modifier Letter Raised Colon", ":"),
+    ('꞉', "Modifier Letter Colon", ":"),
+    ('∶', "Ratio", ":"),
+    ('ː', "Modifier Letter Triangular Colon", ":"),
+    ('ꓽ', "Lisu Letter Tone Mya Jeu", ":"),
+    ('︓', "Presentation Form For Vertical Colon", ":"),
+
+    ('!', "Fullwidth Exclamation Mark", "!"),
+    ('ǃ', "Latin Letter Retroflex Click", "!"),
+    ('ⵑ', "Tifinagh Letter Tuareg Yang", "!"),
+    ('︕', "Presentation Form For Vertical Exclamation Mark", "!"),
+
+    ('ʔ', "Latin Letter Glottal Stop", "?"),
+    ('Ɂ', "Latin Capital Letter Glottal Stop", "?"),
+    ('ॽ', "Devanagari Letter Glottal Stop", "?"),
+    ('Ꭾ', "Cherokee Letter He", "?"),
+    ('ꛫ', "Bamum Letter Ntuu", "?"),
+    ('?', "Fullwidth Question Mark", "?"),
+    ('︖', "Presentation Form For Vertical Question Mark", "?"),
+
+    ('𝅭', "Musical Symbol Combining Augmentation Dot", "."),
+    ('․', "One Dot Leader", "."),
+    ('܁', "Syriac Supralinear Full Stop", "."),
+    ('܂', "Syriac Sublinear Full Stop", "."),
+    ('꘎', "Vai Full Stop", "."),
+    ('𐩐', "Kharoshthi Punctuation Dot", "."),
+    ('٠', "Arabic-Indic Digit Zero", "."),
+    ('۰', "Extended Arabic-Indic Digit Zero", "."),
+    ('ꓸ', "Lisu Letter Tone Mya Ti", "."),
+    ('·', "Middle Dot", "."),
+    ('・', "Katakana Middle Dot", "."),
+    ('・', "Halfwidth Katakana Middle Dot", "."),
+    ('᛫', "Runic Single Punctuation", "."),
+    ('·', "Greek Ano Teleia", "."),
+    ('⸱', "Word Separator Middle Dot", "."),
+    ('𐄁', "Aegean Word Separator Dot", "."),
+    ('•', "Bullet", "."),
+    ('‧', "Hyphenation Point", "."),
+    ('∙', "Bullet Operator", "."),
+    ('⋅', "Dot Operator", "."),
+    ('ꞏ', "Latin Letter Sinological Dot", "."),
+    ('ᐧ', "Canadian Syllabics Final Middle Dot", "."),
+    ('ᐧ', "Canadian Syllabics Final Middle Dot", "."),
+    ('.', "Fullwidth Full Stop", "."),
+    ('。', "Ideographic Full Stop", "."),
+    ('︒', "Presentation Form For Vertical Ideographic Full Stop", "."),
+
+    ('՝', "Armenian Comma", "'"),
+    ('＇', "Fullwidth Apostrophe", "'"),
+    ('‘', "Left Single Quotation Mark", "'"),
+    ('’', "Right Single Quotation Mark", "'"),
+    ('‛', "Single High-Reversed-9 Quotation Mark", "'"),
+    ('′', "Prime", "'"),
+    ('‵', "Reversed Prime", "'"),
+    ('՚', "Armenian Apostrophe", "'"),
+    ('׳', "Hebrew Punctuation Geresh", "'"),
+    ('`', "Grave Accent", "'"),
+    ('`', "Greek Varia", "'"),
+    ('`', "Fullwidth Grave Accent", "'"),
+    ('´', "Acute Accent", "'"),
+    ('΄', "Greek Tonos", "'"),
+    ('´', "Greek Oxia", "'"),
+    ('᾽', "Greek Koronis", "'"),
+    ('᾿', "Greek Psili", "'"),
+    ('῾', "Greek Dasia", "'"),
+    ('ʹ', "Modifier Letter Prime", "'"),
+    ('ʹ', "Greek Numeral Sign", "'"),
+    ('ˈ', "Modifier Letter Vertical Line", "'"),
+    ('ˊ', "Modifier Letter Acute Accent", "'"),
+    ('ˋ', "Modifier Letter Grave Accent", "'"),
+    ('˴', "Modifier Letter Middle Grave Accent", "'"),
+    ('ʻ', "Modifier Letter Turned Comma", "'"),
+    ('ʽ', "Modifier Letter Reversed Comma", "'"),
+    ('ʼ', "Modifier Letter Apostrophe", "'"),
+    ('ʾ', "Modifier Letter Right Half Ring", "'"),
+    ('ꞌ', "Latin Small Letter Saltillo", "'"),
+    ('י', "Hebrew Letter Yod", "'"),
+    ('ߴ', "Nko High Tone Apostrophe", "'"),
+    ('ߵ', "Nko Low Tone Apostrophe", "'"),
+    ('ᑊ', "Canadian Syllabics West-Cree P", "'"),
+    ('ᛌ', "Runic Letter Short-Twig-Sol S", "'"),
+    ('𖽑', "Miao Sign Aspiration", "'"),
+    ('𖽒', "Miao Sign Reformed Voicing", "'"),
+
+    ('᳓', "Vedic Sign Nihshvasa", "\""),
+    ('"', "Fullwidth Quotation Mark", "\""),
+    ('“', "Left Double Quotation Mark", "\""),
+    ('”', "Right Double Quotation Mark", "\""),
+    ('‟', "Double High-Reversed-9 Quotation Mark", "\""),
+    ('″', "Double Prime", "\""),
+    ('‶', "Reversed Double Prime", "\""),
+    ('〃', "Ditto Mark", "\""),
+    ('״', "Hebrew Punctuation Gershayim", "\""),
+    ('˝', "Double Acute Accent", "\""),
+    ('ʺ', "Modifier Letter Double Prime", "\""),
+    ('˶', "Modifier Letter Middle Double Acute Accent", "\""),
+    ('˵', "Modifier Letter Middle Double Grave Accent", "\""),
+    ('ˮ', "Modifier Letter Double Apostrophe", "\""),
+    ('ײ', "Hebrew Ligature Yiddish Double Yod", "\""),
+    ('❞', "Heavy Double Comma Quotation Mark Ornament", "\""),
+    ('❝', "Heavy Double Turned Comma Quotation Mark Ornament", "\""),
+
+    ('(', "Fullwidth Left Parenthesis", "("),
+    ('❨', "Medium Left Parenthesis Ornament", "("),
+    ('﴾', "Ornate Left Parenthesis", "("),
+
+    (')', "Fullwidth Right Parenthesis", ")"),
+    ('❩', "Medium Right Parenthesis Ornament", ")"),
+    ('﴿', "Ornate Right Parenthesis", ")"),
+
+    ('[', "Fullwidth Left Square Bracket", "["),
+    ('❲', "Light Left Tortoise Shell Bracket Ornament", "["),
+    ('「', "Left Corner Bracket", "["),
+    ('『', "Left White Corner Bracket", "["),
+    ('【', "Left Black Lenticular Bracket", "["),
+    ('〔', "Left Tortoise Shell Bracket", "["),
+    ('〖', "Left White Lenticular Bracket", "["),
+    ('〘', "Left White Tortoise Shell Bracket", "["),
+    ('〚', "Left White Square Bracket", "["),
+
+    (']', "Fullwidth Right Square Bracket", "]"),
+    ('❳', "Light Right Tortoise Shell Bracket Ornament", "]"),
+    ('」', "Right Corner Bracket", "]"),
+    ('』', "Right White Corner Bracket", "]"),
+    ('】', "Right Black Lenticular Bracket", "]"),
+    ('〕', "Right Tortoise Shell Bracket", "]"),
+    ('〗', "Right White Lenticular Bracket", "]"),
+    ('〙', "Right White Tortoise Shell Bracket", "]"),
+    ('〛', "Right White Square Bracket", "]"),
+
+    ('❴', "Medium Left Curly Bracket Ornament", "{"),
+    ('𝄔', "Musical Symbol Brace", "{"),
+    ('{', "Fullwidth Left Curly Bracket", "{"),
+
+    ('❵', "Medium Right Curly Bracket Ornament", "}"),
+    ('}', "Fullwidth Right Curly Bracket", "}"),
+
+    ('⁎', "Low Asterisk", "*"),
+    ('٭', "Arabic Five Pointed Star", "*"),
+    ('∗', "Asterisk Operator", "*"),
+    ('𐌟', "Old Italic Letter Ess", "*"),
+    ('*', "Fullwidth Asterisk", "*"),
+
+    ('᜵', "Philippine Single Punctuation", "/"),
+    ('⁁', "Caret Insertion Point", "/"),
+    ('∕', "Division Slash", "/"),
+    ('⁄', "Fraction Slash", "/"),
+    ('╱', "Box Drawings Light Diagonal Upper Right To Lower Left", "/"),
+    ('⟋', "Mathematical Rising Diagonal", "/"),
+    ('⧸', "Big Solidus", "/"),
+    ('𝈺', "Greek Instrumental Notation Symbol-47", "/"),
+    ('㇓', "CJK Stroke Sp", "/"),
+    ('〳', "Vertical Kana Repeat Mark Upper Half", "/"),
+    ('Ⳇ', "Coptic Capital Letter Old Coptic Esh", "/"),
+    ('ノ', "Katakana Letter No", "/"),
+    ('丿', "CJK Unified Ideograph-4E3F", "/"),
+    ('⼃', "Kangxi Radical Slash", "/"),
+    ('/', "Fullwidth Solidus", "/"),
+
+    ('＼', "Fullwidth Reverse Solidus", "\\"),
+    ('﹨', "Small Reverse Solidus", "\\"),
+    ('∖', "Set Minus", "\\"),
+    ('⟍', "Mathematical Falling Diagonal", "\\"),
+    ('⧵', "Reverse Solidus Operator", "\\"),
+    ('⧹', "Big Reverse Solidus", "\\"),
+    ('⧹', "Greek Vocal Notation Symbol-16", "\\"),
+    ('⧹', "Greek Instrumental Symbol-48", "\\"),
+    ('㇔', "CJK Stroke D", "\\"),
+    ('丶', "CJK Unified Ideograph-4E36", "\\"),
+    ('⼂', "Kangxi Radical Dot", "\\"),
+    ('、', "Ideographic Comma", "\\"),
+    ('ヽ', "Katakana Iteration Mark", "\\"),
+
+    ('ꝸ', "Latin Small Letter Um", "&"),
+    ('&', "Fullwidth Ampersand", "&"),
+
+    ('᛭', "Runic Cross Punctuation", "+"),
+    ('➕', "Heavy Plus Sign", "+"),
+    ('𐊛', "Lycian Letter H", "+"),
+    ('﬩', "Hebrew Letter Alternative Plus Sign", "+"),
+    ('+', "Fullwidth Plus Sign", "+"),
+
+    ('‹', "Single Left-Pointing Angle Quotation Mark", "<"),
+    ('❮', "Heavy Left-Pointing Angle Quotation Mark Ornament", "<"),
+    ('˂', "Modifier Letter Left Arrowhead", "<"),
+    ('𝈶', "Greek Instrumental Symbol-40", "<"),
+    ('ᐸ', "Canadian Syllabics Pa", "<"),
+    ('ᚲ', "Runic Letter Kauna", "<"),
+    ('❬', "Medium Left-Pointing Angle Bracket Ornament", "<"),
+    ('⟨', "Mathematical Left Angle Bracket", "<"),
+    ('〈', "Left-Pointing Angle Bracket", "<"),
+    ('〈', "Left Angle Bracket", "<"),
+    ('㇛', "CJK Stroke Pd", "<"),
+    ('く', "Hiragana Letter Ku", "<"),
+    ('𡿨', "CJK Unified Ideograph-21FE8", "<"),
+    ('《', "Left Double Angle Bracket", "<"),
+    ('<', "Fullwidth Less-Than Sign", "<"),
+
+    ('᐀', "Canadian Syllabics Hyphen", "="),
+    ('⹀', "Double Hyphen", "="),
+    ('゠', "Katakana-Hiragana Double Hyphen", "="),
+    ('꓿', "Lisu Punctuation Full Stop", "="),
+    ('=', "Fullwidth Equals Sign", "="),
+
+    ('›', "Single Right-Pointing Angle Quotation Mark", ">"),
+    ('❯', "Heavy Right-Pointing Angle Quotation Mark Ornament", ">"),
+    ('˃', "Modifier Letter Right Arrowhead", ">"),
+    ('𝈷', "Greek Instrumental Symbol-42", ">"),
+    ('ᐳ', "Canadian Syllabics Po", ">"),
+    ('𖼿', "Miao Letter Archaic Zza", ">"),
+    ('❭', "Medium Right-Pointing Angle Bracket Ornament", ">"),
+    ('⟩', "Mathematical Right Angle Bracket", ">"),
+    ('〉', "Right-Pointing Angle Bracket", ">"),
+    ('〉', "Right Angle Bracket", ">"),
+    ('》', "Right Double Angle Bracket", ">"),
+    ('>', "Fullwidth Greater-Than Sign", ">"),
+
+    ('⩵', "Two Consecutive Equals Signs", "==")
+];
+
+// FIXME: the lexer could be used to turn unicode homoglyphs into their ASCII versions, instead
+// of keeping the substitution token in this table. Ideally, this should be inside `rustc_lexer`.
+// However, we should first remove compound tokens like `<<` from `rustc_lexer`, and then add
+// fancier error recovery to it, as there will be less overall work to do this way.
+const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
+    (" ", "Space", None),
+    ("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))),
+    ("-", "Minus/Hyphen", Some(token::Minus)),
+    (",", "Comma", Some(token::Comma)),
+    (";", "Semicolon", Some(token::Semi)),
+    (":", "Colon", Some(token::Colon)),
+    ("!", "Exclamation Mark", Some(token::Bang)),
+    ("?", "Question Mark", Some(token::Question)),
+    (".", "Period", Some(token::Dot)),
+    ("(", "Left Parenthesis", Some(token::OpenParen)),
+    (")", "Right Parenthesis", Some(token::CloseParen)),
+    ("[", "Left Square Bracket", Some(token::OpenBracket)),
+    ("]", "Right Square Bracket", Some(token::CloseBracket)),
+    ("{", "Left Curly Brace", Some(token::OpenBrace)),
+    ("}", "Right Curly Brace", Some(token::CloseBrace)),
+    ("*", "Asterisk", Some(token::Star)),
+    ("/", "Slash", Some(token::Slash)),
+    ("\\", "Backslash", None),
+    ("&", "Ampersand", Some(token::And)),
+    ("+", "Plus Sign", Some(token::Plus)),
+    ("<", "Less-Than Sign", Some(token::Lt)),
+    ("=", "Equals Sign", Some(token::Eq)),
+    ("==", "Double Equals Sign", Some(token::EqEq)),
+    (">", "Greater-Than Sign", Some(token::Gt)),
+    // FIXME: Literals are already lexed by this point, so we can't recover gracefully just by
+    // spitting the correct token out.
+    ("'", "Single Quote", None),
+    ("\"", "Quotation Mark", None),
+];
+
+pub(super) fn check_for_substitution(
+    lexer: &Lexer<'_, '_>,
+    pos: BytePos,
+    ch: char,
+    count: usize,
+) -> (Option<token::TokenKind>, Option<TokenSubstitution>) {
+    let Some(&(_, u_name, ascii_str)) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch) else {
+        return (None, None);
+    };
+
+    let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8() * count));
+
+    let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
+        let msg = format!("substitution character not found for '{ch}'");
+        lexer.dcx().span_bug(span, msg);
+    };
+
+    // special help suggestion for "directed" double quotes
+    let sugg = if let Some(s) = peek_delimited(&lexer.src[lexer.src_index(pos)..], '“', '”') {
+        let span = Span::with_root_ctxt(
+            pos,
+            pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
+        );
+        Some(TokenSubstitution::DirectedQuotes {
+            span,
+            suggestion: format!("\"{s}\""),
+            ascii_str,
+            ascii_name,
+        })
+    } else {
+        let suggestion = ascii_str.to_string().repeat(count);
+        Some(TokenSubstitution::Other {
+            span,
+            suggestion,
+            ch: ch.to_string(),
+            u_name,
+            ascii_str,
+            ascii_name,
+        })
+    };
+    (*token, sugg)
+}
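+
+// For illustration, based on the tables above: a fullwidth semicolon maps to ";" in
+// `UNICODE_ARRAY`, and the ";" entry in `ASCII_ARRAY` carries `Some(token::Semi)`, so a
+// call along the lines of
+//
+//     check_for_substitution(lexer, pos, '；', 1)
+//
+// yields that token plus a `TokenSubstitution::Other` suggestion to type `;` instead.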
+
+/// Extracts the text between `from_ch` and `to_ch` if the input starts with `from_ch`.
+fn peek_delimited(text: &str, from_ch: char, to_ch: char) -> Option<&str> {
+    let mut chars = text.chars();
+    let first_char = chars.next()?;
+    if first_char != from_ch {
+        return None;
+    }
+    let last_char_idx = chars.as_str().find(to_ch)?;
+    Some(&chars.as_str()[..last_char_idx])
+}
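+
+// A minimal sketch of `peek_delimited`: the text must start with `from_ch`, and the
+// (possibly empty) contents up to the next `to_ch` are returned.
+#[cfg(test)]
+mod peek_delimited_sketch {
+    use super::peek_delimited;
+
+    #[test]
+    fn extracts_between_delimiters() {
+        assert_eq!(peek_delimited("“quoted” rest", '“', '”'), Some("quoted"));
+        assert_eq!(peek_delimited("“”", '“', '”'), Some(""));
+        // Missing opening delimiter: no match.
+        assert_eq!(peek_delimited("plain", '“', '”'), None);
+    }
+}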
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
new file mode 100644
index 00000000000..2050c5f9608
--- /dev/null
+++ b/compiler/rustc_parse/src/lib.rs
@@ -0,0 +1,246 @@
+//! The main parser interface.
+
+// tidy-alphabetical-start
+#![allow(rustc::diagnostic_outside_of_impl)]
+#![allow(rustc::untranslatable_diagnostic)]
+#![feature(assert_matches)]
+#![feature(box_patterns)]
+#![feature(debug_closure_helpers)]
+#![feature(if_let_guard)]
+#![feature(iter_intersperse)]
+#![recursion_limit = "256"]
+// tidy-alphabetical-end
+
+use std::path::{Path, PathBuf};
+use std::str::Utf8Error;
+use std::sync::Arc;
+
+use rustc_ast as ast;
+use rustc_ast::tokenstream::TokenStream;
+use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
+use rustc_ast_pretty::pprust;
+use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
+use rustc_session::parse::ParseSess;
+use rustc_span::source_map::SourceMap;
+use rustc_span::{FileName, SourceFile, Span};
+pub use unicode_normalization::UNICODE_VERSION as UNICODE_NORMALIZATION_VERSION;
+
+pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
+
+#[macro_use]
+pub mod parser;
+use parser::Parser;
+pub mod lexer;
+pub mod validate_attr;
+
+mod errors;
+
+rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
+
+// Unwrap the result if `Ok`, otherwise emit the diagnostics and abort.
+pub fn unwrap_or_emit_fatal<T>(expr: Result<T, Vec<Diag<'_>>>) -> T {
+    match expr {
+        Ok(expr) => expr,
+        Err(errs) => {
+            for err in errs {
+                err.emit();
+            }
+            FatalError.raise()
+        }
+    }
+}
+
+/// Creates a new parser from a source string. On failure, the errors must be consumed via
+/// `unwrap_or_emit_fatal`, `emit`, `cancel`, etc., otherwise a panic will occur when they are
+/// dropped.
+pub fn new_parser_from_source_str(
+    psess: &ParseSess,
+    name: FileName,
+    source: String,
+) -> Result<Parser<'_>, Vec<Diag<'_>>> {
+    let source_file = psess.source_map().new_source_file(name, source);
+    new_parser_from_source_file(psess, source_file)
+}
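+
+// For illustration, a rough calling pattern (assuming a `ParseSess` named `psess` is
+// already set up; `FileName::anon_source_code` is just one way of building a name):
+//
+//     let parser = unwrap_or_emit_fatal(new_parser_from_source_str(
+//         &psess,
+//         FileName::anon_source_code("fn main() {}"),
+//         "fn main() {}".to_owned(),
+//     ));
+//
+// Dropping the `Err` case without emitting or cancelling the diagnostics would panic,
+// hence the `unwrap_or_emit_fatal` wrapper.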
+
+/// Creates a new parser from a filename. On failure, the errors must be consumed via
+/// `unwrap_or_emit_fatal`, `emit`, `cancel`, etc., otherwise a panic will occur when they are
+/// dropped.
+///
+/// If a span is given, it is attached to any resulting error as the source of the problem.
+pub fn new_parser_from_file<'a>(
+    psess: &'a ParseSess,
+    path: &Path,
+    sp: Option<Span>,
+) -> Result<Parser<'a>, Vec<Diag<'a>>> {
+    let sm = psess.source_map();
+    let source_file = sm.load_file(path).unwrap_or_else(|e| {
+        let msg = format!("couldn't read `{}`: {}", path.display(), e);
+        let mut err = psess.dcx().struct_fatal(msg);
+        if let Ok(contents) = std::fs::read(path)
+            && let Err(utf8err) = String::from_utf8(contents.clone())
+        {
+            utf8_error(
+                sm,
+                &path.display().to_string(),
+                sp,
+                &mut err,
+                utf8err.utf8_error(),
+                &contents,
+            );
+        }
+        if let Some(sp) = sp {
+            err.span(sp);
+        }
+        err.emit();
+    });
+    new_parser_from_source_file(psess, source_file)
+}
+
+pub fn utf8_error<E: EmissionGuarantee>(
+    sm: &SourceMap,
+    path: &str,
+    sp: Option<Span>,
+    err: &mut Diag<'_, E>,
+    utf8err: Utf8Error,
+    contents: &[u8],
+) {
+    // The file exists, but it wasn't valid UTF-8.
+    let start = utf8err.valid_up_to();
+    let note = format!("invalid utf-8 at byte `{start}`");
+    let msg = if let Some(len) = utf8err.error_len() {
+        format!(
+            "byte{s} `{bytes}` {are} not valid utf-8",
+            bytes = if len == 1 {
+                format!("{:?}", contents[start])
+            } else {
+                format!("{:?}", &contents[start..start + len])
+            },
+            s = pluralize!(len),
+            are = if len == 1 { "is" } else { "are" },
+        )
+    } else {
+        note.clone()
+    };
+    let contents = String::from_utf8_lossy(contents).to_string();
+    let source = sm.new_source_file(PathBuf::from(path).into(), contents);
+    let span = Span::with_root_ctxt(
+        source.normalized_byte_pos(start as u32),
+        source.normalized_byte_pos(start as u32),
+    );
+    if span.is_dummy() {
+        err.note(note);
+    } else {
+        if sp.is_some() {
+            err.span_note(span, msg);
+        } else {
+            err.span(span);
+            err.span_label(span, msg);
+        }
+    }
+}
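+
+// For illustration: with `contents` equal to `b"fn \xFF()"`, `valid_up_to()` is 3 and
+// `error_len()` is `Some(1)`, so the label above renders roughly as
+// "byte `255` is not valid utf-8" at the offending position.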
+
+/// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing
+/// the initial token stream.
+fn new_parser_from_source_file(
+    psess: &ParseSess,
+    source_file: Arc<SourceFile>,
+) -> Result<Parser<'_>, Vec<Diag<'_>>> {
+    let end_pos = source_file.end_position();
+    let stream = source_file_to_stream(psess, source_file, None)?;
+    let mut parser = Parser::new(psess, stream, None);
+    if parser.token == token::Eof {
+        parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
+    }
+    Ok(parser)
+}
+
+pub fn source_str_to_stream(
+    psess: &ParseSess,
+    name: FileName,
+    source: String,
+    override_span: Option<Span>,
+) -> Result<TokenStream, Vec<Diag<'_>>> {
+    let source_file = psess.source_map().new_source_file(name, source);
+    source_file_to_stream(psess, source_file, override_span)
+}
+
+/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
+/// lexing the initial token stream.
+fn source_file_to_stream<'psess>(
+    psess: &'psess ParseSess,
+    source_file: Arc<SourceFile>,
+    override_span: Option<Span>,
+) -> Result<TokenStream, Vec<Diag<'psess>>> {
+    let src = source_file.src.as_ref().unwrap_or_else(|| {
+        psess.dcx().bug(format!(
+            "cannot lex `source_file` without source: {}",
+            psess.source_map().filename_for_diagnostics(&source_file.name)
+        ));
+    });
+
+    lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span)
+}
+
+/// Runs the given subparser `f` on the tokens of the given `attr`'s item.
+pub fn parse_in<'a, T>(
+    psess: &'a ParseSess,
+    tts: TokenStream,
+    name: &'static str,
+    mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+) -> PResult<'a, T> {
+    let mut parser = Parser::new(psess, tts, Some(name));
+    let result = f(&mut parser)?;
+    if parser.token != token::Eof {
+        parser.unexpected()?;
+    }
+    Ok(result)
+}
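+
+// For illustration: `parse_cfg_attr` below drives this with the delimited tokens of a
+// `#[cfg_attr(...)]` attribute, roughly
+//
+//     parse_in(psess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr())
+//
+// so the closure runs over a fresh `Parser` for just those tokens and must consume
+// them all; anything left over is reported via `unexpected`.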
+
+pub fn fake_token_stream_for_item(psess: &ParseSess, item: &ast::Item) -> TokenStream {
+    let source = pprust::item_to_string(item);
+    let filename = FileName::macro_expansion_source_code(&source);
+    unwrap_or_emit_fatal(source_str_to_stream(psess, filename, source, Some(item.span)))
+}
+
+pub fn fake_token_stream_for_crate(psess: &ParseSess, krate: &ast::Crate) -> TokenStream {
+    let source = pprust::crate_to_string_for_macros(krate);
+    let filename = FileName::macro_expansion_source_code(&source);
+    unwrap_or_emit_fatal(source_str_to_stream(
+        psess,
+        filename,
+        source,
+        Some(krate.spans.inner_span),
+    ))
+}
+
+pub fn parse_cfg_attr(
+    cfg_attr: &Attribute,
+    psess: &ParseSess,
+) -> Option<(MetaItemInner, Vec<(AttrItem, Span)>)> {
+    const CFG_ATTR_GRAMMAR_HELP: &str = "#[cfg_attr(condition, attribute, other_attribute, ...)]";
+    const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
+        <https://doc.rust-lang.org/reference/conditional-compilation.html#the-cfg_attr-attribute>";
+
+    match cfg_attr.get_normal_item().args {
+        ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens })
+            if !tokens.is_empty() =>
+        {
+            crate::validate_attr::check_cfg_attr_bad_delim(psess, dspan, delim);
+            match parse_in(psess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
+                Ok(r) => return Some(r),
+                Err(e) => {
+                    e.with_help(format!("the valid syntax is `{CFG_ATTR_GRAMMAR_HELP}`"))
+                        .with_note(CFG_ATTR_NOTE_REF)
+                        .emit();
+                }
+            }
+        }
+        _ => {
+            psess.dcx().emit_err(errors::MalformedCfgAttr {
+                span: cfg_attr.span,
+                sugg: CFG_ATTR_GRAMMAR_HELP,
+            });
+        }
+    }
+    None
+}
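+
+// For illustration: given `#[cfg_attr(feature = "serde", derive(Serialize), repr(C))]`,
+// this returns the `feature = "serde"` predicate as the `MetaItemInner` together with
+// the `derive(Serialize)` and `repr(C)` attribute items and their spans; input without
+// a delimited, non-empty argument list falls through to the `MalformedCfgAttr` error.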
diff --git a/compiler/rustc_parse/src/parser/asm.rs b/compiler/rustc_parse/src/parser/asm.rs
new file mode 100644
index 00000000000..d4d0612a317
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/asm.rs
@@ -0,0 +1,385 @@
+use rustc_ast::ptr::P;
+use rustc_ast::{self as ast, AsmMacro};
+use rustc_span::{Span, Symbol, kw};
+
+use super::{ExpKeywordPair, ForceCollect, IdentIsRaw, Trailing, UsePreAttrPos};
+use crate::{PResult, Parser, errors, exp, token};
+
+/// An argument to one of the `asm!` macros. The argument is syntactically valid, but is otherwise
+/// not validated at all.
+pub struct AsmArg {
+    pub kind: AsmArgKind,
+    pub attributes: AsmAttrVec,
+    pub span: Span,
+}
+
+pub enum AsmArgKind {
+    Template(P<ast::Expr>),
+    Operand(Option<Symbol>, ast::InlineAsmOperand),
+    Options(Vec<AsmOption>),
+    ClobberAbi(Vec<(Symbol, Span)>),
+}
+
+pub struct AsmOption {
+    pub symbol: Symbol,
+    pub span: Span,
+    // A bitset, with only the bit for this option's symbol set.
+    pub options: ast::InlineAsmOptions,
+    // Used when suggesting to remove an option.
+    pub span_with_comma: Span,
+}
+
+/// A parsed list of attributes that is not attached to any item.
+/// Used to check whether `asm!` arguments are configured out.
+pub struct AsmAttrVec(pub ast::AttrVec);
+
+impl AsmAttrVec {
+    fn parse<'a>(p: &mut Parser<'a>) -> PResult<'a, Self> {
+        let attrs = p.parse_outer_attributes()?;
+
+        p.collect_tokens(None, attrs, ForceCollect::No, |_, attrs| {
+            Ok((Self(attrs), Trailing::No, UsePreAttrPos::No))
+        })
+    }
+}
+impl ast::HasAttrs for AsmAttrVec {
+    // Follows `ast::Expr`.
+    const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
+
+    fn attrs(&self) -> &[rustc_ast::Attribute] {
+        &self.0
+    }
+
+    fn visit_attrs(&mut self, f: impl FnOnce(&mut rustc_ast::AttrVec)) {
+        f(&mut self.0)
+    }
+}
+
+impl ast::HasTokens for AsmAttrVec {
+    fn tokens(&self) -> Option<&rustc_ast::tokenstream::LazyAttrTokenStream> {
+        None
+    }
+
+    fn tokens_mut(&mut self) -> Option<&mut Option<rustc_ast::tokenstream::LazyAttrTokenStream>> {
+        None
+    }
+}
+
+/// Used for better error messages when an operand type is used that is not
+/// supported by the current macro (e.g. `in` or `out` for `global_asm!`).
+///
+/// Returns:
+///
+/// - `Ok(true)` if the current token matches the keyword, and was expected
+/// - `Ok(false)` if the current token does not match the keyword
+/// - `Err(_)` if the current token matches the keyword, but was not expected
+fn eat_operand_keyword<'a>(
+    p: &mut Parser<'a>,
+    exp: ExpKeywordPair,
+    asm_macro: AsmMacro,
+) -> PResult<'a, bool> {
+    if matches!(asm_macro, AsmMacro::Asm) {
+        Ok(p.eat_keyword(exp))
+    } else {
+        let span = p.token.span;
+        if p.eat_keyword_noexpect(exp.kw) {
+            // in gets printed as `r#in` otherwise
+            let symbol = if exp.kw == kw::In { "in" } else { exp.kw.as_str() };
+            Err(p.dcx().create_err(errors::AsmUnsupportedOperand {
+                span,
+                symbol,
+                macro_name: asm_macro.macro_name(),
+            }))
+        } else {
+            Ok(false)
+        }
+    }
+}
+
+fn parse_asm_operand<'a>(
+    p: &mut Parser<'a>,
+    asm_macro: AsmMacro,
+) -> PResult<'a, Option<ast::InlineAsmOperand>> {
+    let dcx = p.dcx();
+
+    Ok(Some(if eat_operand_keyword(p, exp!(In), asm_macro)? {
+        let reg = parse_reg(p)?;
+        if p.eat_keyword(exp!(Underscore)) {
+            let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
+            return Err(err);
+        }
+        let expr = p.parse_expr()?;
+        ast::InlineAsmOperand::In { reg, expr }
+    } else if eat_operand_keyword(p, exp!(Out), asm_macro)? {
+        let reg = parse_reg(p)?;
+        let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
+        ast::InlineAsmOperand::Out { reg, expr, late: false }
+    } else if eat_operand_keyword(p, exp!(Lateout), asm_macro)? {
+        let reg = parse_reg(p)?;
+        let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
+        ast::InlineAsmOperand::Out { reg, expr, late: true }
+    } else if eat_operand_keyword(p, exp!(Inout), asm_macro)? {
+        let reg = parse_reg(p)?;
+        if p.eat_keyword(exp!(Underscore)) {
+            let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
+            return Err(err);
+        }
+        let expr = p.parse_expr()?;
+        if p.eat(exp!(FatArrow)) {
+            let out_expr =
+                if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
+            ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: false }
+        } else {
+            ast::InlineAsmOperand::InOut { reg, expr, late: false }
+        }
+    } else if eat_operand_keyword(p, exp!(Inlateout), asm_macro)? {
+        let reg = parse_reg(p)?;
+        if p.eat_keyword(exp!(Underscore)) {
+            let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
+            return Err(err);
+        }
+        let expr = p.parse_expr()?;
+        if p.eat(exp!(FatArrow)) {
+            let out_expr =
+                if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
+            ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: true }
+        } else {
+            ast::InlineAsmOperand::InOut { reg, expr, late: true }
+        }
+    } else if eat_operand_keyword(p, exp!(Label), asm_macro)? {
+        let block = p.parse_block()?;
+        ast::InlineAsmOperand::Label { block }
+    } else if p.eat_keyword(exp!(Const)) {
+        let anon_const = p.parse_expr_anon_const()?;
+        ast::InlineAsmOperand::Const { anon_const }
+    } else if p.eat_keyword(exp!(Sym)) {
+        let expr = p.parse_expr()?;
+        let ast::ExprKind::Path(qself, path) = &expr.kind else {
+            let err = dcx.create_err(errors::AsmSymNoPath { span: expr.span });
+            return Err(err);
+        };
+        let sym =
+            ast::InlineAsmSym { id: ast::DUMMY_NODE_ID, qself: qself.clone(), path: path.clone() };
+        ast::InlineAsmOperand::Sym { sym }
+    } else {
+        return Ok(None);
+    }))
+}
+
+// Public for rustfmt.
+pub fn parse_asm_args<'a>(
+    p: &mut Parser<'a>,
+    sp: Span,
+    asm_macro: AsmMacro,
+) -> PResult<'a, Vec<AsmArg>> {
+    let dcx = p.dcx();
+
+    if p.token == token::Eof {
+        return Err(dcx.create_err(errors::AsmRequiresTemplate { span: sp }));
+    }
+
+    let mut args = Vec::new();
+
+    let attributes = AsmAttrVec::parse(p)?;
+    let first_template = p.parse_expr()?;
+    args.push(AsmArg {
+        span: first_template.span,
+        kind: AsmArgKind::Template(first_template),
+        attributes,
+    });
+
+    let mut allow_templates = true;
+
+    while p.token != token::Eof {
+        if !p.eat(exp!(Comma)) {
+            if allow_templates {
+                // After a template string, we always expect *only* a comma...
+                return Err(dcx.create_err(errors::AsmExpectedComma { span: p.token.span }));
+            } else {
+                // ...after that delegate to `expect` to also include the other expected tokens.
+                return Err(p.expect(exp!(Comma)).err().unwrap());
+            }
+        }
+
+        // Accept trailing commas.
+        if p.token == token::Eof {
+            break;
+        }
+
+        let attributes = AsmAttrVec::parse(p)?;
+        let span_start = p.token.span;
+
+        // Parse `clobber_abi`.
+        if p.eat_keyword(exp!(ClobberAbi)) {
+            allow_templates = false;
+
+            args.push(AsmArg {
+                kind: AsmArgKind::ClobberAbi(parse_clobber_abi(p)?),
+                span: span_start.to(p.prev_token.span),
+                attributes,
+            });
+
+            continue;
+        }
+
+        // Parse `options`.
+        if p.eat_keyword(exp!(Options)) {
+            allow_templates = false;
+
+            args.push(AsmArg {
+                kind: AsmArgKind::Options(parse_options(p, asm_macro)?),
+                span: span_start.to(p.prev_token.span),
+                attributes,
+            });
+
+            continue;
+        }
+
+        // Parse operand names.
+        let name = if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) {
+            let (ident, _) = p.token.ident().unwrap();
+            p.bump();
+            p.expect(exp!(Eq))?;
+            allow_templates = false;
+            Some(ident.name)
+        } else {
+            None
+        };
+
+        if let Some(op) = parse_asm_operand(p, asm_macro)? {
+            allow_templates = false;
+
+            args.push(AsmArg {
+                span: span_start.to(p.prev_token.span),
+                kind: AsmArgKind::Operand(name, op),
+                attributes,
+            });
+        } else if allow_templates {
+            let template = p.parse_expr()?;
+            // If it can't possibly expand to a string, provide diagnostics here to include other
+            // things it could have been.
+            match template.kind {
+                ast::ExprKind::Lit(token_lit)
+                    if matches!(
+                        token_lit.kind,
+                        token::LitKind::Str | token::LitKind::StrRaw(_)
+                    ) => {}
+                ast::ExprKind::MacCall(..) => {}
+                _ => {
+                    let err = dcx.create_err(errors::AsmExpectedOther {
+                        span: template.span,
+                        is_inline_asm: matches!(asm_macro, AsmMacro::Asm),
+                    });
+                    return Err(err);
+                }
+            }
+
+            args.push(AsmArg {
+                span: template.span,
+                kind: AsmArgKind::Template(template),
+                attributes,
+            });
+        } else {
+            p.unexpected_any()?
+        }
+    }
+
+    Ok(args)
+}
+
+fn parse_options<'a>(p: &mut Parser<'a>, asm_macro: AsmMacro) -> PResult<'a, Vec<AsmOption>> {
+    p.expect(exp!(OpenParen))?;
+
+    let mut asm_options = Vec::new();
+
+    while !p.eat(exp!(CloseParen)) {
+        const OPTIONS: [(ExpKeywordPair, ast::InlineAsmOptions); ast::InlineAsmOptions::COUNT] = [
+            (exp!(Pure), ast::InlineAsmOptions::PURE),
+            (exp!(Nomem), ast::InlineAsmOptions::NOMEM),
+            (exp!(Readonly), ast::InlineAsmOptions::READONLY),
+            (exp!(PreservesFlags), ast::InlineAsmOptions::PRESERVES_FLAGS),
+            (exp!(Noreturn), ast::InlineAsmOptions::NORETURN),
+            (exp!(Nostack), ast::InlineAsmOptions::NOSTACK),
+            (exp!(MayUnwind), ast::InlineAsmOptions::MAY_UNWIND),
+            (exp!(AttSyntax), ast::InlineAsmOptions::ATT_SYNTAX),
+            (exp!(Raw), ast::InlineAsmOptions::RAW),
+        ];
+
+        'blk: {
+            for (exp, options) in OPTIONS {
+                // Gives a more accurate list of expected next tokens.
+                let kw_matched = if asm_macro.is_supported_option(options) {
+                    p.eat_keyword(exp)
+                } else {
+                    p.eat_keyword_noexpect(exp.kw)
+                };
+
+                if kw_matched {
+                    let span = p.prev_token.span;
+                    let span_with_comma =
+                        if p.token == token::Comma { span.to(p.token.span) } else { span };
+
+                    asm_options.push(AsmOption { symbol: exp.kw, span, options, span_with_comma });
+                    break 'blk;
+                }
+            }
+
+            return p.unexpected_any();
+        }
+
+        // Allow trailing commas.
+        if p.eat(exp!(CloseParen)) {
+            break;
+        }
+        p.expect(exp!(Comma))?;
+    }
+
+    Ok(asm_options)
+}
+
+fn parse_clobber_abi<'a>(p: &mut Parser<'a>) -> PResult<'a, Vec<(Symbol, Span)>> {
+    p.expect(exp!(OpenParen))?;
+
+    if p.eat(exp!(CloseParen)) {
+        return Err(p.dcx().create_err(errors::NonABI { span: p.token.span }));
+    }
+
+    let mut new_abis = Vec::new();
+    while !p.eat(exp!(CloseParen)) {
+        match p.parse_str_lit() {
+            Ok(str_lit) => {
+                new_abis.push((str_lit.symbol_unescaped, str_lit.span));
+            }
+            Err(opt_lit) => {
+                let span = opt_lit.map_or(p.token.span, |lit| lit.span);
+                return Err(p.dcx().create_err(errors::AsmExpectedStringLiteral { span }));
+            }
+        };
+
+        // Allow trailing commas
+        if p.eat(exp!(CloseParen)) {
+            break;
+        }
+        p.expect(exp!(Comma))?;
+    }
+
+    Ok(new_abis)
+}
+
+fn parse_reg<'a>(p: &mut Parser<'a>) -> PResult<'a, ast::InlineAsmRegOrRegClass> {
+    p.expect(exp!(OpenParen))?;
+    let result = match p.token.uninterpolate().kind {
+        token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name),
+        token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
+            ast::InlineAsmRegOrRegClass::Reg(symbol)
+        }
+        _ => {
+            return Err(p.dcx().create_err(errors::ExpectedRegisterClassOrExplicitRegister {
+                span: p.token.span,
+            }));
+        }
+    };
+    p.bump();
+    p.expect(exp!(CloseParen))?;
+    Ok(result)
+}
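+
+// For illustration: in `in(reg) x` the bare `reg` identifier parses as
+// `InlineAsmRegOrRegClass::RegClass`, an explicit register such as `in("eax") x` parses
+// the string literal as `InlineAsmRegOrRegClass::Reg`, and anything else inside the
+// parentheses is rejected with `ExpectedRegisterClassOrExplicitRegister`.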
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
new file mode 100644
index 00000000000..41d3889c448
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -0,0 +1,512 @@
+use rustc_ast as ast;
+use rustc_ast::token::{self, MetaVarKind};
+use rustc_ast::tokenstream::ParserRange;
+use rustc_ast::{Attribute, attr};
+use rustc_errors::codes::*;
+use rustc_errors::{Diag, PResult};
+use rustc_span::{BytePos, Span};
+use thin_vec::ThinVec;
+use tracing::debug;
+
+use super::{
+    AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos,
+};
+use crate::{errors, exp, fluent_generated as fluent};
+
+// Public for rustfmt usage
+#[derive(Debug)]
+pub enum InnerAttrPolicy {
+    Permitted,
+    Forbidden(Option<InnerAttrForbiddenReason>),
+}
+
+#[derive(Clone, Copy, Debug)]
+pub enum InnerAttrForbiddenReason {
+    InCodeBlock,
+    AfterOuterDocComment { prev_doc_comment_span: Span },
+    AfterOuterAttribute { prev_outer_attr_sp: Span },
+}
+
+enum OuterAttributeType {
+    DocComment,
+    DocBlockComment,
+    Attribute,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum AllowLeadingUnsafe {
+    Yes,
+    No,
+}
+
+impl<'a> Parser<'a> {
+    /// Parses attributes that appear before an item.
+    pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> {
+        let mut outer_attrs = ast::AttrVec::new();
+        let mut just_parsed_doc_comment = false;
+        let start_pos = self.num_bump_calls;
+        loop {
+            let attr = if self.check(exp!(Pound)) {
+                let prev_outer_attr_sp = outer_attrs.last().map(|attr: &Attribute| attr.span);
+
+                let inner_error_reason = if just_parsed_doc_comment {
+                    Some(InnerAttrForbiddenReason::AfterOuterDocComment {
+                        prev_doc_comment_span: prev_outer_attr_sp.unwrap(),
+                    })
+                } else {
+                    prev_outer_attr_sp.map(|prev_outer_attr_sp| {
+                        InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }
+                    })
+                };
+                let inner_parse_policy = InnerAttrPolicy::Forbidden(inner_error_reason);
+                just_parsed_doc_comment = false;
+                Some(self.parse_attribute(inner_parse_policy)?)
+            } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
+                if attr_style != ast::AttrStyle::Outer {
+                    let span = self.token.span;
+                    let mut err = self
+                        .dcx()
+                        .struct_span_err(span, fluent::parse_inner_doc_comment_not_permitted);
+                    err.code(E0753);
+                    if let Some(replacement_span) = self.annotate_following_item_if_applicable(
+                        &mut err,
+                        span,
+                        match comment_kind {
+                            token::CommentKind::Line => OuterAttributeType::DocComment,
+                            token::CommentKind::Block => OuterAttributeType::DocBlockComment,
+                        },
+                        true,
+                    ) {
+                        err.note(fluent::parse_note);
+                        err.span_suggestion_verbose(
+                            replacement_span,
+                            fluent::parse_suggestion,
+                            "",
+                            rustc_errors::Applicability::MachineApplicable,
+                        );
+                    }
+                    err.emit();
+                }
+                self.bump();
+                just_parsed_doc_comment = true;
+                // Always make an outer attribute - this allows us to recover from a misplaced
+                // inner attribute.
+                Some(attr::mk_doc_comment(
+                    &self.psess.attr_id_generator,
+                    comment_kind,
+                    ast::AttrStyle::Outer,
+                    data,
+                    self.prev_token.span,
+                ))
+            } else {
+                None
+            };
+
+            if let Some(attr) = attr {
+                if attr.style == ast::AttrStyle::Outer {
+                    outer_attrs.push(attr);
+                }
+            } else {
+                break;
+            }
+        }
+        Ok(AttrWrapper::new(outer_attrs, start_pos))
+    }
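+
+    // For illustration: given `#[inline] /// docs` before an item, the loop above
+    // collects `#[inline]` through `parse_attribute` and converts the line doc comment
+    // into an outer doc attribute via `attr::mk_doc_comment`, so both end up in the
+    // returned `AttrWrapper`.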
+
+    /// Matches `attribute = # ! [ meta_item ]`.
+    /// `inner_parse_policy` prescribes how to handle inner attributes.
+    // Public for rustfmt usage.
+    pub fn parse_attribute(
+        &mut self,
+        inner_parse_policy: InnerAttrPolicy,
+    ) -> PResult<'a, ast::Attribute> {
+        debug!(
+            "parse_attribute: inner_parse_policy={:?} self.token={:?}",
+            inner_parse_policy, self.token
+        );
+        let lo = self.token.span;
+        // Attributes can't have attributes of their own [Editor's note: not with that attitude]
+        self.collect_tokens_no_attrs(|this| {
+            let pound_hi = this.token.span.hi();
+            assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position");
+
+            let not_lo = this.token.span.lo();
+            let style =
+                if this.eat(exp!(Bang)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
+
+            let mut bracket_res = this.expect(exp!(OpenBracket));
+            // If `#!` is not followed by `[`
+            if let Err(err) = &mut bracket_res
+                && style == ast::AttrStyle::Inner
+                && pound_hi == not_lo
+            {
+                err.note(
+                    "the token sequence `#!` here looks like the start of \
+                    a shebang interpreter directive but it is not",
+                );
+                err.help(
+                    "if you meant this to be a shebang interpreter directive, \
+                    move it to the very start of the file",
+                );
+            }
+            bracket_res?;
+            let item = this.parse_attr_item(ForceCollect::No)?;
+            this.expect(exp!(CloseBracket))?;
+            let attr_sp = lo.to(this.prev_token.span);
+
+            // Emit error if inner attribute is encountered and forbidden.
+            if style == ast::AttrStyle::Inner {
+                this.error_on_forbidden_inner_attr(
+                    attr_sp,
+                    inner_parse_policy,
+                    item.is_valid_for_outer_style(),
+                );
+            }
+
+            Ok(attr::mk_attr_from_item(&self.psess.attr_id_generator, item, None, style, attr_sp))
+        })
+    }
+
+    fn annotate_following_item_if_applicable(
+        &self,
+        err: &mut Diag<'_>,
+        span: Span,
+        attr_type: OuterAttributeType,
+        suggest_to_outer: bool,
+    ) -> Option<Span> {
+        let mut snapshot = self.create_snapshot_for_diagnostic();
+        let lo = span.lo()
+            + BytePos(match attr_type {
+                OuterAttributeType::Attribute => 1,
+                _ => 2,
+            });
+        let hi = lo + BytePos(1);
+        let replacement_span = span.with_lo(lo).with_hi(hi);
+        if let OuterAttributeType::DocBlockComment | OuterAttributeType::DocComment = attr_type {
+            snapshot.bump();
+        }
+        loop {
+            // skip any other attributes, we want the item
+            if snapshot.token == token::Pound {
+                if let Err(err) = snapshot.parse_attribute(InnerAttrPolicy::Permitted) {
+                    err.cancel();
+                    return Some(replacement_span);
+                }
+            } else {
+                break;
+            }
+        }
+        match snapshot.parse_item_common(
+            AttrWrapper::empty(),
+            true,
+            false,
+            FnParseMode { req_name: |_| true, req_body: true },
+            ForceCollect::No,
+        ) {
+            Ok(Some(item)) => {
+                // FIXME(#100717)
+                err.arg("item", item.kind.descr());
+                err.span_label(item.span, fluent::parse_label_does_not_annotate_this);
+                if suggest_to_outer {
+                    err.span_suggestion_verbose(
+                        replacement_span,
+                        fluent::parse_sugg_change_inner_to_outer,
+                        match attr_type {
+                            OuterAttributeType::Attribute => "",
+                            OuterAttributeType::DocBlockComment => "*",
+                            OuterAttributeType::DocComment => "/",
+                        },
+                        rustc_errors::Applicability::MachineApplicable,
+                    );
+                }
+                return None;
+            }
+            Err(item_err) => {
+                item_err.cancel();
+            }
+            Ok(None) => {}
+        }
+        Some(replacement_span)
+    }
+
+    pub(super) fn error_on_forbidden_inner_attr(
+        &self,
+        attr_sp: Span,
+        policy: InnerAttrPolicy,
+        suggest_to_outer: bool,
+    ) {
+        if let InnerAttrPolicy::Forbidden(reason) = policy {
+            let mut diag = match reason.as_ref().copied() {
+                Some(InnerAttrForbiddenReason::AfterOuterDocComment { prev_doc_comment_span }) => {
+                    self.dcx()
+                        .struct_span_err(
+                            attr_sp,
+                            fluent::parse_inner_attr_not_permitted_after_outer_doc_comment,
+                        )
+                        .with_span_label(attr_sp, fluent::parse_label_attr)
+                        .with_span_label(
+                            prev_doc_comment_span,
+                            fluent::parse_label_prev_doc_comment,
+                        )
+                }
+                Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }) => self
+                    .dcx()
+                    .struct_span_err(
+                        attr_sp,
+                        fluent::parse_inner_attr_not_permitted_after_outer_attr,
+                    )
+                    .with_span_label(attr_sp, fluent::parse_label_attr)
+                    .with_span_label(prev_outer_attr_sp, fluent::parse_label_prev_attr),
+                Some(InnerAttrForbiddenReason::InCodeBlock) | None => {
+                    self.dcx().struct_span_err(attr_sp, fluent::parse_inner_attr_not_permitted)
+                }
+            };
+
+            diag.note(fluent::parse_inner_attr_explanation);
+            if self
+                .annotate_following_item_if_applicable(
+                    &mut diag,
+                    attr_sp,
+                    OuterAttributeType::Attribute,
+                    suggest_to_outer,
+                )
+                .is_some()
+            {
+                diag.note(fluent::parse_outer_attr_explanation);
+            };
+            diag.emit();
+        }
+    }
+
+    /// Parses an inner part of an attribute (the path and following tokens).
+    /// The tokens must be either a delimited token stream, an empty token stream,
+    /// or the "legacy" key-value form.
+    ///     PATH `(` TOKEN_STREAM `)`
+    ///     PATH `[` TOKEN_STREAM `]`
+    ///     PATH `{` TOKEN_STREAM `}`
+    ///     PATH
+    ///     PATH `=` UNSUFFIXED_LIT
+    /// The delimiters or `=` are still put into the resulting token stream.
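+    ///
+    /// For example (illustrative): `derive(Clone, Debug)` uses the
+    /// `PATH ( TOKEN_STREAM )` form, `inline` the bare `PATH` form, and
+    /// `doc = "text"` the `PATH = UNSUFFIXED_LIT` form.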
+    pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
+        if let Some(item) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Meta { .. }),
+            |this| this.parse_attr_item(force_collect),
+        ) {
+            return Ok(item);
+        }
+
+        // Attr items don't have attributes.
+        self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
+            let is_unsafe = this.eat_keyword(exp!(Unsafe));
+            let unsafety = if is_unsafe {
+                let unsafe_span = this.prev_token.span;
+                this.expect(exp!(OpenParen))?;
+                ast::Safety::Unsafe(unsafe_span)
+            } else {
+                ast::Safety::Default
+            };
+
+            let path = this.parse_path(PathStyle::Mod)?;
+            let args = this.parse_attr_args()?;
+            if is_unsafe {
+                this.expect(exp!(CloseParen))?;
+            }
+            Ok((
+                ast::AttrItem { unsafety, path, args, tokens: None },
+                Trailing::No,
+                UsePreAttrPos::No,
+            ))
+        })
+    }
+
+    /// Parses attributes that appear after the opening of an item. These should
+    /// be preceded by an exclamation mark, but we accept and warn about one
+    /// terminated by a semicolon.
+    ///
+    /// Matches `inner_attrs*`.
+    pub fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
+        let mut attrs = ast::AttrVec::new();
+        loop {
+            let start_pos = self.num_bump_calls;
+            // Only try to parse if it is an inner attribute (has `!`).
+            let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Bang) {
+                Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
+            } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
+                if attr_style == ast::AttrStyle::Inner {
+                    self.bump();
+                    Some(attr::mk_doc_comment(
+                        &self.psess.attr_id_generator,
+                        comment_kind,
+                        attr_style,
+                        data,
+                        self.prev_token.span,
+                    ))
+                } else {
+                    None
+                }
+            } else {
+                None
+            };
+            if let Some(attr) = attr {
+                // If we are currently capturing tokens (i.e. we are within a call to
+                // `Parser::collect_tokens`) record the token positions of this inner attribute,
+                // for possible later processing in a `LazyAttrTokenStream`.
+                if let Capturing::Yes = self.capture_state.capturing {
+                    let end_pos = self.num_bump_calls;
+                    let parser_range = ParserRange(start_pos..end_pos);
+                    self.capture_state.inner_attr_parser_ranges.insert(attr.id, parser_range);
+                }
+                attrs.push(attr);
+            } else {
+                break;
+            }
+        }
+        Ok(attrs)
+    }
+
+    // Note: must be unsuffixed.
+    pub(crate) fn parse_unsuffixed_meta_item_lit(&mut self) -> PResult<'a, ast::MetaItemLit> {
+        let lit = self.parse_meta_item_lit()?;
+        debug!("checking if {:?} is unsuffixed", lit);
+
+        if !lit.kind.is_unsuffixed() {
+            self.dcx().emit_err(errors::SuffixedLiteralInAttribute { span: lit.span });
+        }
+
+        Ok(lit)
+    }
+
+    /// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited.
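+    ///
+    /// For example (illustrative), in `#[cfg_attr(all(unix, not(test)), allow(dead_code), inline)]`
+    /// the predicate is `all(unix, not(test))` and the expanded attr items are
+    /// `allow(dead_code)` and `inline`.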
+    pub fn parse_cfg_attr(
+        &mut self,
+    ) -> PResult<'a, (ast::MetaItemInner, Vec<(ast::AttrItem, Span)>)> {
+        let cfg_predicate = self.parse_meta_item_inner()?;
+        self.expect(exp!(Comma))?;
+
+        // Presumably, the majority of the time there will only be one attr.
+        let mut expanded_attrs = Vec::with_capacity(1);
+        while self.token != token::Eof {
+            let lo = self.token.span;
+            let item = self.parse_attr_item(ForceCollect::Yes)?;
+            expanded_attrs.push((item, lo.to(self.prev_token.span)));
+            if !self.eat(exp!(Comma)) {
+                break;
+            }
+        }
+
+        Ok((cfg_predicate, expanded_attrs))
+    }
+
+    /// Matches `COMMASEP(meta_item_inner)`.
+    pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::MetaItemInner>> {
+        // Presumably, the majority of the time there will only be one attr.
+        let mut nmis = ThinVec::with_capacity(1);
+        while self.token != token::Eof {
+            nmis.push(self.parse_meta_item_inner()?);
+            if !self.eat(exp!(Comma)) {
+                break;
+            }
+        }
+        Ok(nmis)
+    }
+
+    /// Parse a meta item per RFC 1559.
+    ///
+    /// ```ebnf
+    /// MetaItem = SimplePath ( '=' UNSUFFIXED_LIT | '(' MetaSeq? ')' )? ;
+    /// MetaSeq = MetaItemInner (',' MetaItemInner)* ','? ;
+    /// ```
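+    ///
+    /// For example (illustrative): `test`, `deprecated = "use foo instead"`, and
+    /// `cfg(target_os = "linux")` all match this grammar.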
+    pub fn parse_meta_item(
+        &mut self,
+        unsafe_allowed: AllowLeadingUnsafe,
+    ) -> PResult<'a, ast::MetaItem> {
+        if let Some(MetaVarKind::Meta { has_meta_form }) = self.token.is_metavar_seq() {
+            return if has_meta_form {
+                let attr_item = self
+                    .eat_metavar_seq(MetaVarKind::Meta { has_meta_form: true }, |this| {
+                        this.parse_attr_item(ForceCollect::No)
+                    })
+                    .unwrap();
+                Ok(attr_item.meta(attr_item.path.span).unwrap())
+            } else {
+                self.unexpected_any()
+            };
+        }
+
+        let lo = self.token.span;
+        let is_unsafe = if unsafe_allowed == AllowLeadingUnsafe::Yes {
+            self.eat_keyword(exp!(Unsafe))
+        } else {
+            false
+        };
+        let unsafety = if is_unsafe {
+            let unsafe_span = self.prev_token.span;
+            self.expect(exp!(OpenParen))?;
+
+            ast::Safety::Unsafe(unsafe_span)
+        } else {
+            ast::Safety::Default
+        };
+
+        let path = self.parse_path(PathStyle::Mod)?;
+        let kind = self.parse_meta_item_kind()?;
+        if is_unsafe {
+            self.expect(exp!(CloseParen))?;
+        }
+        let span = lo.to(self.prev_token.span);
+
+        Ok(ast::MetaItem { unsafety, path, kind, span })
+    }
+
+    pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
+        Ok(if self.eat(exp!(Eq)) {
+            ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
+        } else if self.check(exp!(OpenParen)) {
+            let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
+            ast::MetaItemKind::List(list)
+        } else {
+            ast::MetaItemKind::Word
+        })
+    }
+
+    /// Parse an inner meta item per RFC 1559.
+    ///
+    /// ```ebnf
+    /// MetaItemInner = UNSUFFIXED_LIT | MetaItem ;
+    /// ```
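+    ///
+    /// For example (illustrative): in `#[cfg(all(unix, feature = "foo"))]`, the
+    /// inner items of the `all(..)` list are the meta items `unix` and
+    /// `feature = "foo"`; a bare unsuffixed literal such as `"foo"` or `1` is
+    /// also accepted.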
+    pub fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::MetaItemInner> {
+        match self.parse_unsuffixed_meta_item_lit() {
+            Ok(lit) => return Ok(ast::MetaItemInner::Lit(lit)),
+            Err(err) => err.cancel(), // we provide a better error below
+        }
+
+        match self.parse_meta_item(AllowLeadingUnsafe::No) {
+            Ok(mi) => return Ok(ast::MetaItemInner::MetaItem(mi)),
+            Err(err) => err.cancel(), // we provide a better error below
+        }
+
+        let mut err = errors::InvalidMetaItem {
+            span: self.token.span,
+            descr: super::token_descr(&self.token),
+            quote_ident_sugg: None,
+        };
+
+        // Suggest quoting idents, e.g. in `#[cfg(key = value)]`. We don't use `Token::ident` and
+        // don't `uninterpolate` the token to avoid suggesting anything butchered or questionable
+        // when macro metavariables are involved.
+        if self.prev_token == token::Eq
+            && let token::Ident(..) = self.token.kind
+        {
+            let before = self.token.span.shrink_to_lo();
+            while let token::Ident(..) = self.token.kind {
+                self.bump();
+            }
+            err.quote_ident_sugg = Some(errors::InvalidMetaItemQuoteIdentSugg {
+                before,
+                after: self.prev_token.span.shrink_to_hi(),
+            });
+        }
+
+        Err(self.dcx().create_err(err))
+    }
+}
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
new file mode 100644
index 00000000000..44fdf146f9c
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -0,0 +1,407 @@
+use std::borrow::Cow;
+use std::mem;
+
+use rustc_ast::token::Token;
+use rustc_ast::tokenstream::{
+    AttrsTarget, LazyAttrTokenStream, NodeRange, ParserRange, Spacing, TokenCursor,
+};
+use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, HasTokens};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::PResult;
+use rustc_session::parse::ParseSess;
+use rustc_span::{DUMMY_SP, sym};
+use thin_vec::ThinVec;
+
+use super::{Capturing, ForceCollect, Parser, Trailing};
+
+// When collecting tokens, this fully captures the start point. Usually it's
+// just after outer attributes, but occasionally it's before.
+#[derive(Clone, Debug)]
+pub(super) struct CollectPos {
+    start_token: (Token, Spacing),
+    cursor_snapshot: TokenCursor,
+    start_pos: u32,
+}
+
+pub(super) enum UsePreAttrPos {
+    No,
+    Yes,
+}
+
+/// A wrapper type to ensure that the parser handles outer attributes correctly.
+/// When we parse outer attributes, we need to ensure that we capture tokens
+/// for the attribute target. This allows us to perform cfg-expansion on
+/// a token stream before we invoke a derive proc-macro.
+///
+/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
+/// Parsing code can only get access to the underlying attributes
+/// by passing an `AttrWrapper` to `collect_tokens`.
+/// This makes it difficult to accidentally construct an AST node
+/// (which stores an `ast::AttrVec`) without first collecting tokens.
+///
+/// This struct has its own module, to ensure that the parser code
+/// cannot directly access the `attrs` field.
+#[derive(Debug, Clone)]
+pub(super) struct AttrWrapper {
+    attrs: AttrVec,
+    // The start of the outer attributes in the parser's token stream.
+    // This lets us create a `NodeReplacement` for the entire attribute
+    // target, including outer attributes. `None` if there are no outer
+    // attributes.
+    start_pos: Option<u32>,
+}
+
+impl AttrWrapper {
+    pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
+        AttrWrapper { attrs, start_pos: Some(start_pos) }
+    }
+
+    pub(super) fn empty() -> AttrWrapper {
+        AttrWrapper { attrs: AttrVec::new(), start_pos: None }
+    }
+
+    pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
+        psess.dcx().span_delayed_bug(
+            self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
+            "AttrVec is taken for recovery but no error is produced",
+        );
+
+        self.attrs
+    }
+
+    /// Prepend `self.attrs` to `attrs`.
+    // FIXME: require passing an NT to prevent misuse of this method
+    pub(super) fn prepend_to_nt_inner(mut self, attrs: &mut AttrVec) {
+        mem::swap(attrs, &mut self.attrs);
+        attrs.extend(self.attrs);
+    }
+
+    pub(super) fn is_empty(&self) -> bool {
+        self.attrs.is_empty()
+    }
+}
+
+/// Returns `true` if `attrs` contains a `cfg` or `cfg_attr` attribute
+fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
+    // NOTE: Builtin attributes like `cfg` and `cfg_attr` cannot be renamed via imports.
+    // Therefore, the absence of a literal `cfg` or `cfg_attr` guarantees that
+    // we don't need to do any eager expansion.
+    attrs.iter().any(|attr| {
+        attr.ident().is_some_and(|ident| ident.name == sym::cfg || ident.name == sym::cfg_attr)
+    })
+}
+
+impl<'a> Parser<'a> {
+    pub(super) fn collect_pos(&self) -> CollectPos {
+        CollectPos {
+            start_token: (self.token, self.token_spacing),
+            cursor_snapshot: self.token_cursor.clone(),
+            start_pos: self.num_bump_calls,
+        }
+    }
+
+    /// Parses code with `f`. If appropriate, it records the tokens (in
+    /// `LazyAttrTokenStream` form) that were parsed in the result, accessible
+    /// via the `HasTokens` trait. The `Trailing` part of the callback's
+    /// result indicates if an extra token should be captured, e.g. a comma or
+    /// semicolon. The `UsePreAttrPos` part of the callback's result indicates
+    /// if we should use `pre_attr_pos` as the collection start position (only
+    /// required in a few cases).
+    ///
+    /// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
+    /// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
+    /// details.
+    ///
+    /// `pre_attr_pos` is the position before the outer attributes (or the node
+    /// itself, if no outer attributes are present). It is only needed if `f`
+    /// can return `UsePreAttrPos::Yes`.
+    ///
+    /// Note: If your callback consumes an opening delimiter (including the
+    /// case where `self.token` is an opening delimiter on entry to this
+    /// function), you must also consume the corresponding closing delimiter.
+    /// E.g. you can consume `something ([{ }])` or `([{}])`, but not `([{}]`.
+    /// This restriction isn't a problem in practice, because parsed AST items
+    /// always have matching delimiters.
+    ///
+    /// The following example code will be used to explain things in comments
+    /// below. It has an outer attribute and an inner attribute. Parsing it
+    /// involves two calls to this method, one of which is indirectly
+    /// recursive.
+    /// ```ignore (fake attributes)
+    /// #[cfg_eval]                         // token pos
+    /// mod m {                             //   0.. 3
+    ///     #[cfg_attr(cond1, attr1)]       //   3..12
+    ///     fn g() {                        //  12..17
+    ///         #![cfg_attr(cond2, attr2)]  //  17..27
+    ///         let _x = 3;                 //  27..32
+    ///     }                               //  32..33
+    /// }                                   //  33..34
+    /// ```
+    pub(super) fn collect_tokens<R: HasAttrs + HasTokens>(
+        &mut self,
+        pre_attr_pos: Option<CollectPos>,
+        attrs: AttrWrapper,
+        force_collect: ForceCollect,
+        f: impl FnOnce(&mut Self, AttrVec) -> PResult<'a, (R, Trailing, UsePreAttrPos)>,
+    ) -> PResult<'a, R> {
+        let possible_capture_mode = self.capture_cfg;
+
+        // We must collect if anything could observe the collected tokens, i.e.
+        // if any of the following conditions hold.
+        // - We are force collecting tokens (because force collection requires
+        //   tokens by definition).
+        let needs_collection = matches!(force_collect, ForceCollect::Yes)
+            // - Any of our outer attributes require tokens.
+            || needs_tokens(&attrs.attrs)
+            // - Our target supports custom inner attributes (custom
+            //   inner attribute invocation might require token capturing).
+            || R::SUPPORTS_CUSTOM_INNER_ATTRS
+            // - We are in "possible capture mode" (which requires tokens if
+            //   the parsed node has `#[cfg]` or `#[cfg_attr]` attributes).
+            || possible_capture_mode;
+        if !needs_collection {
+            return Ok(f(self, attrs.attrs)?.0);
+        }
+
+        let mut collect_pos = self.collect_pos();
+        let has_outer_attrs = !attrs.attrs.is_empty();
+        let parser_replacements_start = self.capture_state.parser_replacements.len();
+
+        // We set and restore `Capturing::Yes` on either side of the call to
+        // `f`, so we can distinguish the outermost call to `collect_tokens`
+        // (e.g. parsing `m` in the example above) from any inner (indirectly
+        // recursive) calls (e.g. parsing `g` in the example above). This
+        // distinction is used below and in `Parser::parse_inner_attributes`.
+        let (mut ret, capture_trailing, use_pre_attr_pos) = {
+            let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
+            let res = f(self, attrs.attrs);
+            self.capture_state.capturing = prev_capturing;
+            res?
+        };
+
+        // - `None`: Our target doesn't support tokens at all (e.g. `NtIdent`).
+        // - `Some(None)`: Our target supports tokens and has none.
+        // - `Some(Some(_))`: Our target already has tokens set (e.g. we've
+        //   parsed something like `#[my_attr] $item`).
+        let ret_can_hold_tokens = matches!(ret.tokens_mut(), Some(None));
+
+        // Ignore any attributes we've previously processed. This happens when
+        // an inner call to `collect_tokens` returns an AST node and then an
+        // outer call ends up with the same AST node without any additional
+        // wrapping layer.
+        let mut seen_indices = FxHashSet::default();
+        for (i, attr) in ret.attrs().iter().enumerate() {
+            let is_unseen = self.capture_state.seen_attrs.insert(attr.id);
+            if !is_unseen {
+                seen_indices.insert(i);
+            }
+        }
+        let ret_attrs: Cow<'_, [Attribute]> =
+            if seen_indices.is_empty() {
+                Cow::Borrowed(ret.attrs())
+            } else {
+                let ret_attrs =
+                    ret.attrs()
+                        .iter()
+                        .enumerate()
+                        .filter_map(|(i, attr)| {
+                            if seen_indices.contains(&i) { None } else { Some(attr.clone()) }
+                        })
+                        .collect();
+                Cow::Owned(ret_attrs)
+            };
+
+        // When we're not in "definite capture mode", then skip collecting and
+        // return early if `ret` doesn't support tokens or already has some.
+        //
+        // Note that this check is independent of `force_collect`. There's no
+        // need to collect tokens when we don't support tokens or already have
+        // tokens.
+        let definite_capture_mode = self.capture_cfg
+            && matches!(self.capture_state.capturing, Capturing::Yes)
+            && has_cfg_or_cfg_attr(&ret_attrs);
+        if !definite_capture_mode && !ret_can_hold_tokens {
+            return Ok(ret);
+        }
+
+        // This is similar to the `needs_collection` check at the start of this
+        // function, but now that we've parsed an AST node we have complete
+        // information available. (If we return early here that means the
+        // setup, such as cloning the token cursor, was unnecessary. That's
+        // hard to avoid.)
+        //
+        // We must collect if anything could observe the collected tokens, i.e.
+        // if any of the following conditions hold.
+        // - We are force collecting tokens.
+        let needs_collection = matches!(force_collect, ForceCollect::Yes)
+            // - Any of our outer *or* inner attributes require tokens.
+            //   (`attr.attrs` was just outer attributes, but `ret.attrs()` is
+            //   outer and inner attributes. So this check is more precise than
+            //   the earlier `needs_tokens` check, and we don't need to
+            //   check `R::SUPPORTS_CUSTOM_INNER_ATTRS`.)
+            || needs_tokens(&ret_attrs)
+            // - We are in "definite capture mode", which requires that there
+            //   are `#[cfg]` or `#[cfg_attr]` attributes. (During normal
+            //   non-`capture_cfg` parsing, we don't need any special capturing
+            //   for those attributes, because they're builtin.)
+            || definite_capture_mode;
+        if !needs_collection {
+            return Ok(ret);
+        }
+
+        // Replace the post-attribute collection start position with the
+        // pre-attribute position supplied, if `f` indicated it is necessary.
+        // (The caller is responsible for providing a non-`None` `pre_attr_pos`
+        // if this is a possibility.)
+        if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) {
+            collect_pos = pre_attr_pos.unwrap();
+        }
+
+        let parser_replacements_end = self.capture_state.parser_replacements.len();
+
+        assert!(
+            !(self.break_last_token > 0 && matches!(capture_trailing, Trailing::Yes)),
+            "Cannot have break_last_token > 0 and have trailing token"
+        );
+        assert!(self.break_last_token <= 2, "cannot break token more than twice");
+
+        let end_pos = self.num_bump_calls
+            + capture_trailing as u32
+            // If we "broke" the last token (e.g. breaking a `>>` token once into `>` + `>`, or
+            // breaking a `>>=` token twice into `>` + `>` + `=`), then extend the range of
+            // captured tokens to include it, because the parser was not actually bumped past it.
+            // (Even if we broke twice, it was still just one token originally, hence the `1`.)
+            // When the `LazyAttrTokenStream` gets converted into an `AttrTokenStream`, we will
+            // rebreak that final token once or twice.
+            + if self.break_last_token == 0 { 0 } else { 1 };
+
+        let num_calls = end_pos - collect_pos.start_pos;
+
+        // Take the captured `ParserRange`s for any inner attributes that we parsed in
+        // `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`,
+        // which means the relevant tokens will be removed. (More details below.)
+        let mut inner_attr_parser_replacements = Vec::new();
+        for attr in ret_attrs.iter() {
+            if attr.style == ast::AttrStyle::Inner {
+                if let Some(inner_attr_parser_range) =
+                    self.capture_state.inner_attr_parser_ranges.remove(&attr.id)
+                {
+                    inner_attr_parser_replacements.push((inner_attr_parser_range, None));
+                } else {
+                    self.dcx().span_delayed_bug(attr.span, "Missing token range for attribute");
+                }
+            }
+        }
+
+        // This is hot enough for `deep-vector` that checking the conditions for an empty iterator
+        // is measurably faster than actually executing the iterator.
+        let node_replacements = if parser_replacements_start == parser_replacements_end
+            && inner_attr_parser_replacements.is_empty()
+        {
+            ThinVec::new()
+        } else {
+            // Grab any replace ranges that occur *inside* the current AST node. Convert them
+            // from `ParserRange` form to `NodeRange` form. We will perform the actual
+            // replacement only when we convert the `LazyAttrTokenStream` to an
+            // `AttrTokenStream`.
+            self.capture_state.parser_replacements
+                [parser_replacements_start..parser_replacements_end]
+                .iter()
+                .cloned()
+                .chain(inner_attr_parser_replacements)
+                .map(|(parser_range, data)| {
+                    (NodeRange::new(parser_range, collect_pos.start_pos), data)
+                })
+                .collect()
+        };
+
+        // What is the status here when parsing the example code at the top of this method?
+        //
+        // When parsing `g`:
+        // - `start_pos..end_pos` is `12..33` (`fn g { ... }`, excluding the outer attr).
+        // - `inner_attr_parser_replacements` has one entry (`ParserRange(17..27)`), to
+        //   delete the inner attr's tokens.
+        //   - This entry is converted to `NodeRange(5..15)` (relative to the `fn`) and put into
+        //     the lazy tokens for `g`, i.e. deleting the inner attr from those tokens (if they get
+        //     evaluated).
+        //   - Those lazy tokens are also put into an `AttrsTarget` that is appended to `self`'s
+        //     replace ranges at the bottom of this function, for processing when parsing `m`.
+        // - `parser_replacements_start..parser_replacements_end` is empty.
+        //
+        // When parsing `m`:
+        // - `start_pos..end_pos` is `0..34` (`mod m`, excluding the `#[cfg_eval]` attribute).
+        // - `inner_attr_parser_replacements` is empty.
+        // - `parser_replacements_start..parser_replacements_end` has one entry.
+        //   - One `AttrsTarget` (added below when parsing `g`) to replace all of `g` (`3..33`,
+        //     including its outer attribute), with:
+        //     - `attrs`: includes the outer and the inner attr.
+        //     - `tokens`: lazy tokens for `g` (with its inner attr deleted).
+
+        let tokens = LazyAttrTokenStream::new_pending(
+            collect_pos.start_token,
+            collect_pos.cursor_snapshot,
+            num_calls,
+            self.break_last_token,
+            node_replacements,
+        );
+        let mut tokens_used = false;
+
+        // If in "definite capture mode" we need to register a replace range
+        // for the `#[cfg]` and/or `#[cfg_attr]` attrs. This allows us to run
+        // eager cfg-expansion on the captured token stream.
+        if definite_capture_mode {
+            assert!(self.break_last_token == 0, "Should not have unglued last token with cfg attr");
+
+            // What is the status here when parsing the example code at the top of this method?
+            //
+            // When parsing `g`, we add one entry:
+            // - The pushed entry (`ParserRange(3..33)`) has a new `AttrsTarget` with:
+            //   - `attrs`: includes the outer and the inner attr.
+            //   - `tokens`: lazy tokens for `g` (with its inner attr deleted).
+            //
+            // When parsing `m`, we do nothing here.
+
+            // Set things up so that the entire AST node that we just parsed, including attributes,
+            // will be replaced with `target` in the lazy token stream. This will allow us to
+            // cfg-expand this AST node.
+            let start_pos =
+                if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos };
+            let target =
+                AttrsTarget { attrs: ret_attrs.iter().cloned().collect(), tokens: tokens.clone() };
+            tokens_used = true;
+            self.capture_state
+                .parser_replacements
+                .push((ParserRange(start_pos..end_pos), Some(target)));
+        } else if matches!(self.capture_state.capturing, Capturing::No) {
+            // Only clear the ranges once we've finished capturing entirely, i.e. we've finished
+            // the outermost call to this method.
+            self.capture_state.parser_replacements.clear();
+            self.capture_state.inner_attr_parser_ranges.clear();
+            self.capture_state.seen_attrs.clear();
+        }
+
+        // If we support tokens and don't already have them, store the newly captured tokens.
+        if let Some(target_tokens @ None) = ret.tokens_mut() {
+            tokens_used = true;
+            *target_tokens = Some(tokens);
+        }
+
+        assert!(tokens_used); // check we didn't create `tokens` unnecessarily
+        Ok(ret)
+    }
+}
+
+/// Tokens are needed if:
+/// - any non-single-segment attributes (other than doc comments) are present,
+///   e.g. `rustfmt::skip`; or
+/// - any `cfg_attr` attributes are present; or
+/// - any single-segment, non-builtin attributes are present, e.g. `derive`,
+///   `test`, `global_allocator`.
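+///
+/// For example (illustrative): `#[rustfmt::skip]`, `#[cfg_attr(test, ignore)]`,
+/// and `#[derive(Clone)]` all need tokens, while `#[inline]` and doc comments
+/// do not.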
+fn needs_tokens(attrs: &[ast::Attribute]) -> bool {
+    attrs.iter().any(|attr| match attr.ident() {
+        None => !attr.is_doc_comment(),
+        Some(ident) => {
+            ident.name == sym::cfg_attr || !rustc_feature::is_builtin_attr_name(ident.name)
+        }
+    })
+}
diff --git a/compiler/rustc_parse/src/parser/cfg_select.rs b/compiler/rustc_parse/src/parser/cfg_select.rs
new file mode 100644
index 00000000000..2c6fb224d70
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/cfg_select.rs
@@ -0,0 +1,75 @@
+use rustc_ast::token::Token;
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::{MetaItemInner, token};
+use rustc_errors::PResult;
+use rustc_span::Span;
+
+use crate::exp;
+use crate::parser::Parser;
+
+pub enum CfgSelectPredicate {
+    Cfg(MetaItemInner),
+    Wildcard(Token),
+}
+
+#[derive(Default)]
+pub struct CfgSelectBranches {
+    /// All the conditional branches.
+    pub reachable: Vec<(MetaItemInner, TokenStream, Span)>,
+    /// The first wildcard `_ => { ... }` branch.
+    pub wildcard: Option<(Token, TokenStream, Span)>,
+    /// All branches after the first wildcard, including further wildcards.
+    /// These branches are kept for formatting.
+    pub unreachable: Vec<(CfgSelectPredicate, TokenStream, Span)>,
+}
+
+/// Parses a `TokenTree` that must be of the form `{ /* ... */ }`, and returns a `TokenStream` where
+/// the surrounding braces are stripped.
+fn parse_token_tree<'a>(p: &mut Parser<'a>) -> PResult<'a, TokenStream> {
+    // Generate an error if the `=>` is not followed by `{`.
+    if p.token != token::OpenBrace {
+        p.expect(exp!(OpenBrace))?;
+    }
+
+    // Strip the outer '{' and '}'.
+    match p.parse_token_tree() {
+        TokenTree::Token(..) => unreachable!("because of the expect above"),
+        TokenTree::Delimited(.., tts) => Ok(tts),
+    }
+}
+
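+/// Parses the branches of a `cfg_select` invocation. The input shape expected
+/// here (inferred from the parsing below; illustrative only) is a sequence of
+/// arms such as:
+///
+/// ```text
+/// unix => { /* tokens */ }
+/// target_os = "macos" => { /* tokens */ }
+/// _ => { /* tokens */ }
+/// ```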
+pub fn parse_cfg_select<'a>(p: &mut Parser<'a>) -> PResult<'a, CfgSelectBranches> {
+    let mut branches = CfgSelectBranches::default();
+
+    while p.token != token::Eof {
+        if p.eat_keyword(exp!(Underscore)) {
+            let underscore = p.prev_token;
+            p.expect(exp!(FatArrow))?;
+
+            let tts = parse_token_tree(p)?;
+            let span = underscore.span.to(p.token.span);
+
+            match branches.wildcard {
+                None => branches.wildcard = Some((underscore, tts, span)),
+                Some(_) => {
+                    branches.unreachable.push((CfgSelectPredicate::Wildcard(underscore), tts, span))
+                }
+            }
+        } else {
+            let meta_item = p.parse_meta_item_inner()?;
+            p.expect(exp!(FatArrow))?;
+
+            let tts = parse_token_tree(p)?;
+            let span = meta_item.span().to(p.token.span);
+
+            match branches.wildcard {
+                None => branches.reachable.push((meta_item, tts, span)),
+                Some(_) => {
+                    branches.unreachable.push((CfgSelectPredicate::Cfg(meta_item), tts, span))
+                }
+            }
+        }
+    }
+
+    Ok(branches)
+}
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
new file mode 100644
index 00000000000..e0f810d8c1e
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -0,0 +1,3108 @@
+use std::mem::take;
+use std::ops::{Deref, DerefMut};
+
+use ast::token::IdentIsRaw;
+use rustc_ast as ast;
+use rustc_ast::ptr::P;
+use rustc_ast::token::{self, Lit, LitKind, Token, TokenKind};
+use rustc_ast::util::parser::AssocOp;
+use rustc_ast::{
+    AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
+    BlockCheckMode, Expr, ExprKind, GenericArg, Generics, Item, ItemKind, Param, Pat, PatKind,
+    Path, PathSegment, QSelf, Recovered, Ty, TyKind,
+};
+use rustc_ast_pretty::pprust;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::{
+    Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, PResult, Subdiagnostic, Suggestions,
+    pluralize,
+};
+use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_span::edit_distance::find_best_match_for_name;
+use rustc_span::source_map::Spanned;
+use rustc_span::symbol::used_keywords;
+use rustc_span::{BytePos, DUMMY_SP, Ident, Span, SpanSnippetError, Symbol, kw, sym};
+use thin_vec::{ThinVec, thin_vec};
+use tracing::{debug, trace};
+
+use super::pat::Expected;
+use super::{
+    BlockMode, CommaRecoveryMode, ExpTokenPair, Parser, PathStyle, Restrictions, SemiColonMode,
+    SeqSep, TokenType,
+};
+use crate::errors::{
+    AddParen, AmbiguousPlus, AsyncMoveBlockIn2015, AsyncUseBlockIn2015, AttributeOnParamType,
+    AwaitSuggestion, BadQPathStage2, BadTypePlus, BadTypePlusSub, ColonAsSemi,
+    ComparisonOperatorsCannotBeChained, ComparisonOperatorsCannotBeChainedSugg,
+    ConstGenericWithoutBraces, ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything,
+    DocCommentOnParamType, DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
+    GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg,
+    HelpIdentifierStartsWithNumber, HelpUseLatestEdition, InInTypo, IncorrectAwait,
+    IncorrectSemicolon, IncorrectUseOfAwait, IncorrectUseOfUse, PatternMethodParamWithoutBody,
+    QuestionMarkInType, QuestionMarkInTypeSugg, SelfParamNotFirst, StructLiteralBodyWithoutPath,
+    StructLiteralBodyWithoutPathSugg, SuggAddMissingLetStmt, SuggEscapeIdentifier, SuggRemoveComma,
+    TernaryOperator, TernaryOperatorSuggestion, UnexpectedConstInGenericParam,
+    UnexpectedConstParamDeclaration, UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets,
+    UseEqInstead, WrapType,
+};
+use crate::parser::attr::InnerAttrPolicy;
+use crate::{exp, fluent_generated as fluent};
+
+/// Creates a placeholder argument.
+pub(super) fn dummy_arg(ident: Ident, guar: ErrorGuaranteed) -> Param {
+    let pat = P(Pat {
+        id: ast::DUMMY_NODE_ID,
+        kind: PatKind::Ident(BindingMode::NONE, ident, None),
+        span: ident.span,
+        tokens: None,
+    });
+    let ty = Ty { kind: TyKind::Err(guar), span: ident.span, id: ast::DUMMY_NODE_ID, tokens: None };
+    Param {
+        attrs: AttrVec::default(),
+        id: ast::DUMMY_NODE_ID,
+        pat,
+        span: ident.span,
+        ty: P(ty),
+        is_placeholder: false,
+    }
+}
+
+pub(super) trait RecoverQPath: Sized + 'static {
+    const PATH_STYLE: PathStyle = PathStyle::Expr;
+    fn to_ty(&self) -> Option<P<Ty>>;
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self;
+}
+
+impl RecoverQPath for Ty {
+    const PATH_STYLE: PathStyle = PathStyle::Type;
+    fn to_ty(&self) -> Option<P<Ty>> {
+        Some(P(self.clone()))
+    }
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
+        Self {
+            span: path.span,
+            kind: TyKind::Path(qself, path),
+            id: ast::DUMMY_NODE_ID,
+            tokens: None,
+        }
+    }
+}
+
+impl RecoverQPath for Pat {
+    const PATH_STYLE: PathStyle = PathStyle::Pat;
+    fn to_ty(&self) -> Option<P<Ty>> {
+        self.to_ty()
+    }
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
+        Self {
+            span: path.span,
+            kind: PatKind::Path(qself, path),
+            id: ast::DUMMY_NODE_ID,
+            tokens: None,
+        }
+    }
+}
+
+impl RecoverQPath for Expr {
+    fn to_ty(&self) -> Option<P<Ty>> {
+        self.to_ty()
+    }
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
+        Self {
+            span: path.span,
+            kind: ExprKind::Path(qself, path),
+            attrs: AttrVec::new(),
+            id: ast::DUMMY_NODE_ID,
+            tokens: None,
+        }
+    }
+}
+
+/// Control whether the closing delimiter should be consumed when calling `Parser::consume_block`.
+pub(crate) enum ConsumeClosingDelim {
+    Yes,
+    No,
+}
+
+#[derive(Clone, Copy)]
+pub enum AttemptLocalParseRecovery {
+    Yes,
+    No,
+}
+
+impl AttemptLocalParseRecovery {
+    pub(super) fn yes(&self) -> bool {
+        match self {
+            AttemptLocalParseRecovery::Yes => true,
+            AttemptLocalParseRecovery::No => false,
+        }
+    }
+
+    pub(super) fn no(&self) -> bool {
+        match self {
+            AttemptLocalParseRecovery::Yes => false,
+            AttemptLocalParseRecovery::No => true,
+        }
+    }
+}
+
+/// Information for emitting suggestions and recovering from
+/// C-style `i++`, `--i`, etc.
+#[derive(Debug, Copy, Clone)]
+struct IncDecRecovery {
+    /// Is this increment/decrement its own statement?
+    standalone: IsStandalone,
+    /// Is this an increment or decrement?
+    op: IncOrDec,
+    /// Is this pre- or postfix?
+    fixity: UnaryFixity,
+}
+
+/// Is an increment or decrement expression its own statement?
+#[derive(Debug, Copy, Clone)]
+enum IsStandalone {
+    /// It's standalone, i.e., its own statement.
+    Standalone,
+    /// It's a subexpression, i.e., *not* standalone.
+    Subexpr,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+enum IncOrDec {
+    Inc,
+    Dec,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+enum UnaryFixity {
+    Pre,
+    Post,
+}
+
+impl IncOrDec {
+    fn chr(&self) -> char {
+        match self {
+            Self::Inc => '+',
+            Self::Dec => '-',
+        }
+    }
+
+    fn name(&self) -> &'static str {
+        match self {
+            Self::Inc => "increment",
+            Self::Dec => "decrement",
+        }
+    }
+}
+
+impl std::fmt::Display for UnaryFixity {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Pre => write!(f, "prefix"),
+            Self::Post => write!(f, "postfix"),
+        }
+    }
+}
+
+#[derive(Debug, rustc_macros::Subdiagnostic)]
+#[suggestion(
+    parse_misspelled_kw,
+    applicability = "machine-applicable",
+    code = "{similar_kw}",
+    style = "verbose"
+)]
+struct MisspelledKw {
+    similar_kw: String,
+    #[primary_span]
+    span: Span,
+    is_incorrect_case: bool,
+}
+
+/// Checks if the given `lookup` identifier is similar to any keyword symbol in `candidates`.
+fn find_similar_kw(lookup: Ident, candidates: &[Symbol]) -> Option<MisspelledKw> {
+    let lowercase = lookup.name.as_str().to_lowercase();
+    let lowercase_sym = Symbol::intern(&lowercase);
+    if candidates.contains(&lowercase_sym) {
+        Some(MisspelledKw { similar_kw: lowercase, span: lookup.span, is_incorrect_case: true })
+    } else if let Some(similar_sym) = find_best_match_for_name(candidates, lookup.name, None) {
+        Some(MisspelledKw {
+            similar_kw: similar_sym.to_string(),
+            span: lookup.span,
+            is_incorrect_case: false,
+        })
+    } else {
+        None
+    }
+}
+
+struct MultiSugg {
+    msg: String,
+    patches: Vec<(Span, String)>,
+    applicability: Applicability,
+}
+
+impl MultiSugg {
+    fn emit(self, err: &mut Diag<'_>) {
+        err.multipart_suggestion(self.msg, self.patches, self.applicability);
+    }
+
+    fn emit_verbose(self, err: &mut Diag<'_>) {
+        err.multipart_suggestion_verbose(self.msg, self.patches, self.applicability);
+    }
+}
+
+/// SnapshotParser is used to create a snapshot of the parser
+/// without causing duplicate errors to be emitted when the `Parser`
+/// is dropped.
+pub struct SnapshotParser<'a> {
+    parser: Parser<'a>,
+}
+
+impl<'a> Deref for SnapshotParser<'a> {
+    type Target = Parser<'a>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.parser
+    }
+}
+
+impl<'a> DerefMut for SnapshotParser<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.parser
+    }
+}
+
+impl<'a> Parser<'a> {
+    pub fn dcx(&self) -> DiagCtxtHandle<'a> {
+        self.psess.dcx()
+    }
+
+    /// Replace `self` with `snapshot.parser`.
+    pub(super) fn restore_snapshot(&mut self, snapshot: SnapshotParser<'a>) {
+        *self = snapshot.parser;
+    }
+
+    /// Create a snapshot of the `Parser`.
+    pub fn create_snapshot_for_diagnostic(&self) -> SnapshotParser<'a> {
+        let snapshot = self.clone();
+        SnapshotParser { parser: snapshot }
+    }
+
+    pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
+        self.psess.source_map().span_to_snippet(span)
+    }
+
+    /// Emits an error with suggestions if an identifier was expected but not found.
+    ///
+    /// Returns a possibly recovered identifier.
+    pub(super) fn expected_ident_found(
+        &mut self,
+        recover: bool,
+    ) -> PResult<'a, (Ident, IdentIsRaw)> {
+        let valid_follow = &[
+            TokenKind::Eq,
+            TokenKind::Colon,
+            TokenKind::Comma,
+            TokenKind::Semi,
+            TokenKind::PathSep,
+            TokenKind::OpenBrace,
+            TokenKind::OpenParen,
+            TokenKind::CloseBrace,
+            TokenKind::CloseParen,
+        ];
+        if let TokenKind::DocComment(..) = self.prev_token.kind
+            && valid_follow.contains(&self.token.kind)
+        {
+            let err = self.dcx().create_err(DocCommentDoesNotDocumentAnything {
+                span: self.prev_token.span,
+                missing_comma: None,
+            });
+            return Err(err);
+        }
+
+        let mut recovered_ident = None;
+        // we take this here so that the correct original token is retained in
+        // the diagnostic, regardless of eager recovery.
+        let bad_token = self.token;
+
+        // suggest prepending a keyword in identifier position with `r#`
+        let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
+            && ident.is_raw_guess()
+            && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
+        {
+            recovered_ident = Some((ident, IdentIsRaw::Yes));
+
+            // `Symbol::to_string()` is different from `Symbol::into_diag_arg()`,
+            // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
+            let ident_name = ident.name.to_string();
+
+            Some(SuggEscapeIdentifier { span: ident.span.shrink_to_lo(), ident_name })
+        } else {
+            None
+        };
+
+        let suggest_remove_comma =
+            if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
+                if recover {
+                    self.bump();
+                    recovered_ident = self.ident_or_err(false).ok();
+                };
+
+                Some(SuggRemoveComma { span: bad_token.span })
+            } else {
+                None
+            };
+
+        let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| {
+            let (invalid, valid) = self.token.span.split_at(len as u32);
+
+            recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No));
+
+            HelpIdentifierStartsWithNumber { num_span: invalid }
+        });
+
+        let err = ExpectedIdentifier {
+            span: bad_token.span,
+            token: bad_token,
+            suggest_raw,
+            suggest_remove_comma,
+            help_cannot_start_number,
+        };
+        let mut err = self.dcx().create_err(err);
+
+        // If the token we have is a `<`,
+        // it *might* be a misplaced generic.
+        // FIXME: could we recover with this?
+        if self.token == token::Lt {
+            // all keywords that could have generic applied
+            let valid_prev_keywords =
+                [kw::Fn, kw::Type, kw::Struct, kw::Enum, kw::Union, kw::Trait];
+
+            // If we expected an identifier, the current token is a `<`,
+            // and the previous token is a valid keyword that might take
+            // generics, then suggest the correct generic placement
+            // (the suggestion is emitted later on).
+            let maybe_keyword = self.prev_token;
+            if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
+                // If we have a valid keyword, attempt to parse generics
+                // and also obtain the keyword's symbol.
+                match self.parse_generics() {
+                    Ok(generic) => {
+                        if let TokenKind::Ident(symbol, _) = maybe_keyword.kind {
+                            let ident_name = symbol;
+                            // At this point, we've found something like
+                            // `fn <T>id`, and the current token should be an
+                            // `Ident` holding the item name (i.e. the function name).
+                            // If there is a `<` after the fn name, don't show a
+                            // suggestion; show help instead.
+
+                            if !self.look_ahead(1, |t| *t == token::Lt)
+                                && let Ok(snippet) =
+                                    self.psess.source_map().span_to_snippet(generic.span)
+                            {
+                                err.multipart_suggestion_verbose(
+                                        format!("place the generic parameter name after the {ident_name} name"),
+                                        vec![
+                                            (self.token.span.shrink_to_hi(), snippet),
+                                            (generic.span, String::new())
+                                        ],
+                                        Applicability::MaybeIncorrect,
+                                    );
+                            } else {
+                                err.help(format!(
+                                    "place the generic parameter name after the {ident_name} name"
+                                ));
+                            }
+                        }
+                    }
+                    Err(err) => {
+                        // if there's an error parsing the generics,
+                        // then don't do a misplaced generics suggestion
+                        // and emit the expected ident error instead;
+                        err.cancel();
+                    }
+                }
+            }
+        }
+
+        if let Some(recovered_ident) = recovered_ident
+            && recover
+        {
+            err.emit();
+            Ok(recovered_ident)
+        } else {
+            Err(err)
+        }
+    }
+
+    pub(super) fn expected_ident_found_err(&mut self) -> Diag<'a> {
+        self.expected_ident_found(false).unwrap_err()
+    }
+
+    /// Checks if the current token is an integer or float literal and looks like
+    /// it could be an invalid identifier with digits at the start.
+    ///
+    /// Returns the number of characters (bytes) composing the invalid portion
+    /// of the identifier and the valid portion of the identifier.
+    pub(super) fn is_lit_bad_ident(&mut self) -> Option<(usize, Symbol)> {
+        // ensure that the integer literal is followed by an *invalid*
+        // suffix: this is how we know that it is an identifier with an
+        // invalid beginning.
+        if let token::Literal(Lit {
+            kind: token::LitKind::Integer | token::LitKind::Float,
+            symbol,
+            suffix: Some(suffix), // no suffix makes it a valid literal
+        }) = self.token.kind
+            && rustc_ast::MetaItemLit::from_token(&self.token).is_none()
+        {
+            Some((symbol.as_str().len(), suffix))
+        } else {
+            None
+        }
+    }
+
+    pub(super) fn expected_one_of_not_found(
+        &mut self,
+        edible: &[ExpTokenPair<'_>],
+        inedible: &[ExpTokenPair<'_>],
+    ) -> PResult<'a, ErrorGuaranteed> {
+        debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
+        fn tokens_to_string(tokens: &[TokenType]) -> String {
+            let mut i = tokens.iter();
+            // This might be a sign we need a connect method on `Iterator`.
+            let b = i.next().map_or_else(String::new, |t| t.to_string());
+            i.enumerate().fold(b, |mut b, (i, a)| {
+                if tokens.len() > 2 && i == tokens.len() - 2 {
+                    b.push_str(", or ");
+                } else if tokens.len() == 2 && i == tokens.len() - 2 {
+                    b.push_str(" or ");
+                } else {
+                    b.push_str(", ");
+                }
+                b.push_str(&a.to_string());
+                b
+            })
+        }
+
+        for exp in edible.iter().chain(inedible.iter()) {
+            self.expected_token_types.insert(exp.token_type);
+        }
+        let mut expected: Vec<_> = self.expected_token_types.iter().collect();
+        expected.sort_by_cached_key(|x| x.to_string());
+        expected.dedup();
+
+        let sm = self.psess.source_map();
+
+        // Special-case "expected `;`" errors.
+        if expected.contains(&TokenType::Semi) {
+            // If the user is trying to write a ternary expression, recover it and
+            // return an Err to prevent a cascade of irrelevant diagnostics.
+            if self.prev_token == token::Question
+                && let Err(e) = self.maybe_recover_from_ternary_operator(None)
+            {
+                return Err(e);
+            }
+
+            if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
+                // Likely inside a macro, can't provide meaningful suggestions.
+            } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
+                // The current token is on the same line as the prior token; not recoverable.
+            } else if [token::Comma, token::Colon].contains(&self.token.kind)
+                && self.prev_token == token::CloseParen
+            {
+                // Likely typo: The current token is on a new line and is expected to be
+                // `.`, `;`, `?`, or an operator after a close delimiter token.
+                //
+                // let a = std::process::Command::new("echo")
+                //         .arg("1")
+                //         ,arg("2")
+                //         ^
+                // https://github.com/rust-lang/rust/issues/72253
+            } else if self.look_ahead(1, |t| {
+                t == &token::CloseBrace || t.can_begin_expr() && *t != token::Colon
+            }) && [token::Comma, token::Colon].contains(&self.token.kind)
+            {
+                // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
+                // either `,` or `:`, and the next token could either start a new statement or is a
+                // block close. For example:
+                //
+                //   let x = 32:
+                //   let y = 42;
+                let guar = self.dcx().emit_err(ExpectedSemi {
+                    span: self.token.span,
+                    token: self.token,
+                    unexpected_token_label: None,
+                    sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
+                });
+                self.bump();
+                return Ok(guar);
+            } else if self.look_ahead(0, |t| {
+                t == &token::CloseBrace
+                    || ((t.can_begin_expr() || t.can_begin_item())
+                        && t != &token::Semi
+                        && t != &token::Pound)
+                    // Avoid triggering with too many trailing `#` in raw string.
+                    || (sm.is_multiline(
+                        self.prev_token.span.shrink_to_hi().until(self.token.span.shrink_to_lo()),
+                    ) && t == &token::Pound)
+            }) && !expected.contains(&TokenType::Comma)
+            {
+                // Missing semicolon typo. This is triggered if the next token could either start a
+                // new statement or is a block close. For example:
+                //
+                //   let x = 32
+                //   let y = 42;
+                let span = self.prev_token.span.shrink_to_hi();
+                let guar = self.dcx().emit_err(ExpectedSemi {
+                    span,
+                    token: self.token,
+                    unexpected_token_label: Some(self.token.span),
+                    sugg: ExpectedSemiSugg::AddSemi(span),
+                });
+                return Ok(guar);
+            }
+        }
+
+        if self.token == TokenKind::EqEq
+            && self.prev_token.is_ident()
+            && expected.contains(&TokenType::Eq)
+        {
+            // Likely typo: `=` → `==` in let expr or enum item
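+            // For example: `let x == 42;` or `enum E { A == 1 }`, where `=` was intended.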
+            return Err(self.dcx().create_err(UseEqInstead { span: self.token.span }));
+        }
+
+        if (self.token.is_keyword(kw::Move) || self.token.is_keyword(kw::Use))
+            && self.prev_token.is_keyword(kw::Async)
+        {
+            // The 2015 edition is in use because parsing of `async move` or `async use` has failed.
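+            // For example: `async move { .. }` in a crate still on edition 2015, where
+            // `async` is not a keyword and the block fails to parse.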
+            let span = self.prev_token.span.to(self.token.span);
+            if self.token.is_keyword(kw::Move) {
+                return Err(self.dcx().create_err(AsyncMoveBlockIn2015 { span }));
+            } else {
+                // kw::Use
+                return Err(self.dcx().create_err(AsyncUseBlockIn2015 { span }));
+            }
+        }
+
+        let expect = tokens_to_string(&expected);
+        let actual = super::token_descr(&self.token);
+        let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
+            let fmt = format!("expected one of {expect}, found {actual}");
+            let short_expect = if expected.len() > 6 {
+                format!("{} possible tokens", expected.len())
+            } else {
+                expect
+            };
+            (fmt, (self.prev_token.span.shrink_to_hi(), format!("expected one of {short_expect}")))
+        } else if expected.is_empty() {
+            (
+                format!("unexpected token: {actual}"),
+                (self.prev_token.span, "unexpected token after this".to_string()),
+            )
+        } else {
+            (
+                format!("expected {expect}, found {actual}"),
+                (self.prev_token.span.shrink_to_hi(), format!("expected {expect}")),
+            )
+        };
+        self.last_unexpected_token_span = Some(self.token.span);
+        // FIXME: translation requires list formatting (for `expect`)
+        let mut err = self.dcx().struct_span_err(self.token.span, msg_exp);
+
+        self.label_expected_raw_ref(&mut err);
+
+        // Look for usages of '=>' where '>=' was probably intended
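+        // For example: `if a => b { .. }`, where `if a >= b { .. }` was likely intended.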
+        if self.token == token::FatArrow
+            && expected.iter().any(|tok| matches!(tok, TokenType::Operator | TokenType::Le))
+            && !expected.iter().any(|tok| matches!(tok, TokenType::FatArrow | TokenType::Comma))
+        {
+            err.span_suggestion(
+                self.token.span,
+                "you might have meant to write a \"greater than or equal to\" comparison",
+                ">=",
+                Applicability::MaybeIncorrect,
+            );
+        }
+
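+        // Suggest `fn` when an item was introduced with a function keyword from another
+        // language, e.g. `def foo() {}` or `function bar() {}`.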
+        if let TokenKind::Ident(symbol, _) = &self.prev_token.kind {
+            if ["def", "fun", "func", "function"].contains(&symbol.as_str()) {
+                err.span_suggestion_short(
+                    self.prev_token.span,
+                    format!("write `fn` instead of `{symbol}` to declare a function"),
+                    "fn",
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
+
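+        // If the previous and current identifiers spell a keyword when joined, the user
+        // likely typed a stray space, e.g. `f n foo() {}` instead of `fn foo() {}`.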
+        if let TokenKind::Ident(prev, _) = &self.prev_token.kind
+            && let TokenKind::Ident(cur, _) = &self.token.kind
+        {
+            let concat = Symbol::intern(&format!("{prev}{cur}"));
+            let ident = Ident::new(concat, DUMMY_SP);
+            if ident.is_used_keyword() || ident.is_reserved() || ident.is_raw_guess() {
+                let concat_span = self.prev_token.span.to(self.token.span);
+                err.span_suggestion_verbose(
+                    concat_span,
+                    format!("consider removing the space to spell keyword `{concat}`"),
+                    concat,
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
+
+        // Try to detect an intended c-string literal while using a pre-2021 edition. The heuristic
+        // here is to identify a cooked, uninterpolated `c` id immediately followed by a string, or
+        // a cooked, uninterpolated `cr` id immediately followed by a string or a `#`, in an edition
+        // where c-string literals are not allowed. There is the very slight possibility of a false
+        // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
+        // that in the parser requires unbounded lookahead, so we only add a hint to the existing
+        // error rather than replacing it entirely.
+        if ((self.prev_token == TokenKind::Ident(sym::c, IdentIsRaw::No)
+            && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
+            || (self.prev_token == TokenKind::Ident(sym::cr, IdentIsRaw::No)
+                && matches!(
+                    &self.token.kind,
+                    TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
+                )))
+            && self.prev_token.span.hi() == self.token.span.lo()
+            && !self.token.span.at_least_rust_2021()
+        {
+            err.note("you may be trying to write a c-string literal");
+            err.note("c-string literals require Rust 2021 or later");
+            err.subdiagnostic(HelpUseLatestEdition::new());
+        }
+
+        // `pub` may be used for an item or `pub(crate)`
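+        // For example: `public fn foo() {}` or `public(crate) struct S;`.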
+        if self.prev_token.is_ident_named(sym::public)
+            && (self.token.can_begin_item() || self.token == TokenKind::OpenParen)
+        {
+            err.span_suggestion_short(
+                self.prev_token.span,
+                "write `pub` instead of `public` to make the item public",
+                "pub",
+                Applicability::MachineApplicable,
+            );
+        }
+
+        if let token::DocComment(kind, style, _) = self.token.kind {
+            // This is to avoid suggesting converting a doc comment to a regular comment
+            // when missing a comma before the doc comment in lists (#142311):
+            //
+            // ```
+            // enum Foo{
+            //     A /// xxxxxxx
+            //     B,
+            // }
+            // ```
+            if !expected.contains(&TokenType::Comma) {
+                // We have something like `expr //!val` where the user likely meant `expr // !val`
+                let pos = self.token.span.lo() + BytePos(2);
+                let span = self.token.span.with_lo(pos).with_hi(pos);
+                err.span_suggestion_verbose(
+                    span,
+                    format!(
+                        "add a space before {} to write a regular comment",
+                        match (kind, style) {
+                            (token::CommentKind::Line, ast::AttrStyle::Inner) => "`!`",
+                            (token::CommentKind::Block, ast::AttrStyle::Inner) => "`!`",
+                            (token::CommentKind::Line, ast::AttrStyle::Outer) => "the last `/`",
+                            (token::CommentKind::Block, ast::AttrStyle::Outer) => "the last `*`",
+                        },
+                    ),
+                    " ".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+        }
+
+        let sp = if self.token == token::Eof {
+            // This is EOF; don't want to point at the following char, but rather the last token.
+            self.prev_token.span
+        } else {
+            label_sp
+        };
+
+        if self.check_too_many_raw_str_terminators(&mut err) {
+            if expected.contains(&TokenType::Semi) && self.eat(exp!(Semi)) {
+                let guar = err.emit();
+                return Ok(guar);
+            } else {
+                return Err(err);
+            }
+        }
+
+        if self.prev_token.span == DUMMY_SP {
+            // Account for macro context where the previous span might not be
+            // available to avoid incorrect output (#54841).
+            err.span_label(self.token.span, label_exp);
+        } else if !sm.is_multiline(self.token.span.shrink_to_hi().until(sp.shrink_to_lo())) {
+            // When the spans are on the same line, the only content between them is
+            // whitespace, so point at the found token in that case:
+            //
+            // X |     () => { syntax error };
+            //   |                    ^^^^^ expected one of 8 possible tokens here
+            //
+            // instead of having:
+            //
+            // X |     () => { syntax error };
+            //   |                   -^^^^^ unexpected token
+            //   |                   |
+            //   |                   expected one of 8 possible tokens here
+            err.span_label(self.token.span, label_exp);
+        } else {
+            err.span_label(sp, label_exp);
+            err.span_label(self.token.span, "unexpected token");
+        }
+
+        // Check for misspelled keywords if there are no suggestions added to the diagnostic.
+        if matches!(&err.suggestions, Suggestions::Enabled(list) if list.is_empty()) {
+            self.check_for_misspelled_kw(&mut err, &expected);
+        }
+        Err(err)
+    }
+
+    /// Adds a label when `&raw EXPR` was written instead of `&raw const EXPR`/`&raw mut EXPR`.
+    ///
+    /// Given that not all parser diagnostics flow through `expected_one_of_not_found`, this
+    /// label may need to be added to other diagnostic emission paths as well.
+    pub(super) fn label_expected_raw_ref(&mut self, err: &mut Diag<'_>) {
+        if self.prev_token.is_keyword(kw::Raw)
+            && self.expected_token_types.contains(TokenType::KwMut)
+            && self.expected_token_types.contains(TokenType::KwConst)
+            && self.token.can_begin_expr()
+        {
+            err.span_suggestions(
+                self.prev_token.span.shrink_to_hi(),
+                "`&raw` must be followed by `const` or `mut` to be a raw reference expression",
+                [" const".to_string(), " mut".to_string()],
+                Applicability::MaybeIncorrect,
+            );
+        }
+    }
+
+    /// Checks whether the current token or the previous token is a misspelled keyword
+    /// and adds a helpful suggestion.
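+    /// For example: `Struct Human;` (misspelled `struct`) or `async Move {}` (misspelled `move`).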
+    fn check_for_misspelled_kw(&self, err: &mut Diag<'_>, expected: &[TokenType]) {
+        let Some((curr_ident, _)) = self.token.ident() else {
+            return;
+        };
+        let expected_token_types: &[TokenType] =
+            expected.len().checked_sub(10).map_or(&expected, |index| &expected[index..]);
+        let expected_keywords: Vec<Symbol> =
+            expected_token_types.iter().filter_map(|token| token.is_keyword()).collect();
+
+        // When there are a few keywords in the last ten elements of `self.expected_token_types`
+        // and the current token is an identifier, it's probably a misspelled keyword. This handles
+        // code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in
+        // `if`-`else` and misspelled `where` in a where clause.
+        if !expected_keywords.is_empty()
+            && !curr_ident.is_used_keyword()
+            && let Some(misspelled_kw) = find_similar_kw(curr_ident, &expected_keywords)
+        {
+            err.subdiagnostic(misspelled_kw);
+            // We don't want other suggestions to be added as they are most likely meaningless
+            // when there is a misspelled keyword.
+            err.seal_suggestions();
+        } else if let Some((prev_ident, _)) = self.prev_token.ident()
+            && !prev_ident.is_used_keyword()
+        {
+            // We generate a list of all keywords at runtime rather than at compile time
+            // so that it gets generated only when the diagnostic needs it.
+            // Also, it is unlikely that this list is generated multiple times because the
+            // parser halts after execution hits this path.
+            let all_keywords = used_keywords(|| prev_ident.span.edition());
+
+            // Otherwise, check the previous token with all the keywords as possible candidates.
+            // This handles code like `Struct Human;` and `While a < b {}`.
+            // We check the previous token only when the current token is an identifier to avoid
+            // false positives like suggesting keyword `for` for `extern crate foo {}`.
+            if let Some(misspelled_kw) = find_similar_kw(prev_ident, &all_keywords) {
+                err.subdiagnostic(misspelled_kw);
+                // We don't want other suggestions to be added as they are most likely meaningless
+                // when there is a misspelled keyword.
+                err.seal_suggestions();
+            }
+        }
+    }
+
+    /// The user has written `#[attr] expr` which is unsupported. (#106020)
+    pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) -> ErrorGuaranteed {
+        // Missing semicolon typo error.
+        let span = self.prev_token.span.shrink_to_hi();
+        let mut err = self.dcx().create_err(ExpectedSemi {
+            span,
+            token: self.token,
+            unexpected_token_label: Some(self.token.span),
+            sugg: ExpectedSemiSugg::AddSemi(span),
+        });
+        let attr_span = match &expr.attrs[..] {
+            [] => unreachable!(),
+            [only] => only.span,
+            [first, rest @ ..] => {
+                for attr in rest {
+                    err.span_label(attr.span, "");
+                }
+                first.span
+            }
+        };
+        err.span_label(
+            attr_span,
+            format!(
+                "only `;` terminated statements or tail expressions are allowed after {}",
+                if expr.attrs.len() == 1 { "this attribute" } else { "these attributes" },
+            ),
+        );
+        if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) {
+            // We have
+            // #[attr]
+            // expr
+            // #[not_attr]
+            // other_expr
+            err.span_label(span, "expected `;` here");
+            err.multipart_suggestion(
+                "alternatively, consider surrounding the expression with a block",
+                vec![
+                    (expr.span.shrink_to_lo(), "{ ".to_string()),
+                    (expr.span.shrink_to_hi(), " }".to_string()),
+                ],
+                Applicability::MachineApplicable,
+            );
+
+            // Special handling for `#[cfg(...)]` chains
+            let mut snapshot = self.create_snapshot_for_diagnostic();
+            if let [attr] = &expr.attrs[..]
+                && let ast::AttrKind::Normal(attr_kind) = &attr.kind
+                && let [segment] = &attr_kind.item.path.segments[..]
+                && segment.ident.name == sym::cfg
+                && let Some(args_span) = attr_kind.item.args.span()
+                && let next_attr = match snapshot.parse_attribute(InnerAttrPolicy::Forbidden(None))
+                {
+                    Ok(next_attr) => next_attr,
+                    Err(inner_err) => {
+                        inner_err.cancel();
+                        return err.emit();
+                    }
+                }
+                && let ast::AttrKind::Normal(next_attr_kind) = next_attr.kind
+                && let Some(next_attr_args_span) = next_attr_kind.item.args.span()
+                && let [next_segment] = &next_attr_kind.item.path.segments[..]
+                && next_segment.ident.name == sym::cfg
+            {
+                let next_expr = match snapshot.parse_expr() {
+                    Ok(next_expr) => next_expr,
+                    Err(inner_err) => {
+                        inner_err.cancel();
+                        return err.emit();
+                    }
+                };
+                // We have for sure
+                // #[cfg(..)]
+                // expr
+                // #[cfg(..)]
+                // other_expr
+                // So we suggest using `if cfg!(..) { expr } else if cfg!(..) { other_expr }`.
+                let margin = self.psess.source_map().span_to_margin(next_expr.span).unwrap_or(0);
+                let sugg = vec![
+                    (attr.span.with_hi(segment.span().hi()), "if cfg!".to_string()),
+                    (args_span.shrink_to_hi().with_hi(attr.span.hi()), " {".to_string()),
+                    (expr.span.shrink_to_lo(), "    ".to_string()),
+                    (
+                        next_attr.span.with_hi(next_segment.span().hi()),
+                        "} else if cfg!".to_string(),
+                    ),
+                    (
+                        next_attr_args_span.shrink_to_hi().with_hi(next_attr.span.hi()),
+                        " {".to_string(),
+                    ),
+                    (next_expr.span.shrink_to_lo(), "    ".to_string()),
+                    (next_expr.span.shrink_to_hi(), format!("\n{}}}", " ".repeat(margin))),
+                ];
+                err.multipart_suggestion(
+                    "it seems like you are trying to provide different expressions depending on \
+                     `cfg`, consider using `if cfg!(..)`",
+                    sugg,
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
+
+        err.emit()
+    }
+
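+    /// Detects a raw string terminated with more `#` than it was opened with, e.g.
+    /// `r#"foo"##`, and suggests removing the extra `#`.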
+    fn check_too_many_raw_str_terminators(&mut self, err: &mut Diag<'_>) -> bool {
+        let sm = self.psess.source_map();
+        match (&self.prev_token.kind, &self.token.kind) {
+            (
+                TokenKind::Literal(Lit {
+                    kind: LitKind::StrRaw(n_hashes) | LitKind::ByteStrRaw(n_hashes),
+                    ..
+                }),
+                TokenKind::Pound,
+            ) if !sm.is_multiline(
+                self.prev_token.span.shrink_to_hi().until(self.token.span.shrink_to_lo()),
+            ) =>
+            {
+                let n_hashes: u8 = *n_hashes;
+                err.primary_message("too many `#` when terminating raw string");
+                let str_span = self.prev_token.span;
+                let mut span = self.token.span;
+                let mut count = 0;
+                while self.token == TokenKind::Pound
+                    && !sm.is_multiline(span.shrink_to_hi().until(self.token.span.shrink_to_lo()))
+                {
+                    span = span.with_hi(self.token.span.hi());
+                    self.bump();
+                    count += 1;
+                }
+                err.span(span);
+                err.span_suggestion(
+                    span,
+                    format!("remove the extra `#`{}", pluralize!(count)),
+                    "",
+                    Applicability::MachineApplicable,
+                );
+                err.span_label(
+                    str_span,
+                    format!("this raw string started with {n_hashes} `#`{}", pluralize!(n_hashes)),
+                );
+                true
+            }
+            _ => false,
+        }
+    }
+
+    pub(super) fn maybe_suggest_struct_literal(
+        &mut self,
+        lo: Span,
+        s: BlockCheckMode,
+        maybe_struct_name: token::Token,
+    ) -> Option<PResult<'a, P<Block>>> {
+        if self.token.is_ident() && self.look_ahead(1, |t| t == &token::Colon) {
+            // We might be having a struct literal where people forgot to include the path:
+            // fn foo() -> Foo {
+            //     field: value,
+            // }
+            debug!(?maybe_struct_name, ?self.token);
+            let mut snapshot = self.create_snapshot_for_diagnostic();
+            let path = Path {
+                segments: ThinVec::new(),
+                span: self.prev_token.span.shrink_to_lo(),
+                tokens: None,
+            };
+            let struct_expr = snapshot.parse_expr_struct(None, path, false);
+            let block_tail = self.parse_block_tail(lo, s, AttemptLocalParseRecovery::No);
+            return Some(match (struct_expr, block_tail) {
+                (Ok(expr), Err(err)) => {
+                    // We have encountered the following:
+                    // fn foo() -> Foo {
+                    //     field: value,
+                    // }
+                    // Suggest:
+                    // fn foo() -> Foo { Path {
+                    //     field: value,
+                    // } }
+                    err.cancel();
+                    self.restore_snapshot(snapshot);
+                    let guar = self.dcx().emit_err(StructLiteralBodyWithoutPath {
+                        span: expr.span,
+                        sugg: StructLiteralBodyWithoutPathSugg {
+                            before: expr.span.shrink_to_lo(),
+                            after: expr.span.shrink_to_hi(),
+                        },
+                    });
+                    Ok(self.mk_block(
+                        thin_vec![self.mk_stmt_err(expr.span, guar)],
+                        s,
+                        lo.to(self.prev_token.span),
+                    ))
+                }
+                (Err(err), Ok(tail)) => {
+                    // The block tail turned out to contain a valid expr; use it.
+                    err.cancel();
+                    Ok(tail)
+                }
+                (Err(snapshot_err), Err(err)) => {
+                    // We don't know what went wrong, emit the normal error.
+                    snapshot_err.cancel();
+                    self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
+                    Err(err)
+                }
+                (Ok(_), Ok(tail)) => Ok(tail),
+            });
+        }
+        None
+    }
+
+    pub(super) fn recover_closure_body(
+        &mut self,
+        mut err: Diag<'a>,
+        before: token::Token,
+        prev: token::Token,
+        token: token::Token,
+        lo: Span,
+        decl_hi: Span,
+    ) -> PResult<'a, P<Expr>> {
+        err.span_label(lo.to(decl_hi), "while parsing the body of this closure");
+        let guar = match before.kind {
+            token::OpenBrace if token.kind != token::OpenBrace => {
+                // `{ || () }` should have been `|| { () }`
+                err.multipart_suggestion(
+                    "you might have meant to open the body of the closure, instead of enclosing \
+                     the closure in a block",
+                    vec![
+                        (before.span, String::new()),
+                        (prev.span.shrink_to_hi(), " {".to_string()),
+                    ],
+                    Applicability::MaybeIncorrect,
+                );
+                let guar = err.emit();
+                self.eat_to_tokens(&[exp!(CloseBrace)]);
+                guar
+            }
+            token::OpenParen if token.kind != token::OpenBrace => {
+                // We are within a function call or tuple, we can emit the error
+                // and recover.
+                self.eat_to_tokens(&[exp!(CloseParen), exp!(Comma)]);
+
+                err.multipart_suggestion_verbose(
+                    "you might have meant to open the body of the closure",
+                    vec![
+                        (prev.span.shrink_to_hi(), " {".to_string()),
+                        (self.token.span.shrink_to_lo(), "}".to_string()),
+                    ],
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit()
+            }
+            _ if token.kind != token::OpenBrace => {
+                // We don't have a heuristic to correctly identify where the block
+                // should be closed.
+                err.multipart_suggestion_verbose(
+                    "you might have meant to open the body of the closure",
+                    vec![(prev.span.shrink_to_hi(), " {".to_string())],
+                    Applicability::HasPlaceholders,
+                );
+                return Err(err);
+            }
+            _ => return Err(err),
+        };
+        Ok(self.mk_expr_err(lo.to(self.token.span), guar))
+    }
+
+    /// Eats and discards tokens until one of `closes` is encountered. Respects token trees
+    /// and cancels any errors encountered. Used for error recovery.
+    pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair<'_>]) {
+        if let Err(err) = self
+            .parse_seq_to_before_tokens(closes, &[], SeqSep::none(), |p| Ok(p.parse_token_tree()))
+        {
+            err.cancel();
+        }
+    }
+
+    /// This function checks if there are trailing angle brackets and produces
+    /// a diagnostic to suggest removing them.
+    ///
+    /// ```ignore (diagnostic)
+    /// let _ = [1, 2, 3].into_iter().collect::<Vec<usize>>>>();
+    ///                                                    ^^ help: remove extra angle brackets
+    /// ```
+    ///
+    /// If `Some(_)` is returned, then trailing angle brackets were recovered, tokens were
+    /// consumed up until one of the tokens in `end` was encountered, and an error was emitted.
+    pub(super) fn check_trailing_angle_brackets(
+        &mut self,
+        segment: &PathSegment,
+        end: &[ExpTokenPair<'_>],
+    ) -> Option<ErrorGuaranteed> {
+        if !self.may_recover() {
+            return None;
+        }
+
+        // This function is intended to be invoked after parsing a path segment where there are two
+        // cases:
+        //
+        // 1. A specific token is expected after the path segment.
+        //    eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
+        //        `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
+        // 2. No specific token is expected after the path segment.
+        //    eg. `x.foo` (field access)
+        //
+        // This function is called after parsing `.foo` and before parsing the token `end` (if
+        // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
+        // `Foo::<Bar>`.
+
+        // We only care about trailing angle brackets if we previously parsed angle bracket
+        // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
+        // removed in this case:
+        //
+        // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
+        //
+        // This case is particularly tricky as we won't notice it just looking at the tokens -
+        // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
+        // have already been parsed):
+        //
+        // `x.foo::<u32>>>(3)`
+        let parsed_angle_bracket_args =
+            segment.args.as_ref().is_some_and(|args| args.is_angle_bracketed());
+
+        debug!(
+            "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
+            parsed_angle_bracket_args,
+        );
+        if !parsed_angle_bracket_args {
+            return None;
+        }
+
+        // Keep the span at the start so we can highlight the sequence of `>` characters to be
+        // removed.
+        let lo = self.token.span;
+
+        // We need to look-ahead to see if we have `>` characters without moving the cursor forward
+        // (since we might have the field access case and the characters we're eating are
+        // actual operators and not trailing characters - ie `x.foo >> 3`).
+        let mut position = 0;
+
+        // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
+        // many of each (so we can correctly pluralize our error messages) and continue to
+        // advance.
+        let mut number_of_shr = 0;
+        let mut number_of_gt = 0;
+        while self.look_ahead(position, |t| {
+            trace!("check_trailing_angle_brackets: t={:?}", t);
+            if *t == token::Shr {
+                number_of_shr += 1;
+                true
+            } else if *t == token::Gt {
+                number_of_gt += 1;
+                true
+            } else {
+                false
+            }
+        }) {
+            position += 1;
+        }
+
+        // If we didn't find any trailing `>` characters, then we have nothing to error about.
+        debug!(
+            "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
+            number_of_gt, number_of_shr,
+        );
+        if number_of_gt < 1 && number_of_shr < 1 {
+            return None;
+        }
+
+        // Finally, double check that we have our end token as otherwise this is the
+        // second case.
+        if self.look_ahead(position, |t| {
+            trace!("check_trailing_angle_brackets: t={:?}", t);
+            end.iter().any(|exp| exp.tok == &t.kind)
+        }) {
+            // Eat from where we started until the end token so that parsing can continue
+            // as if we didn't have those extra angle brackets.
+            self.eat_to_tokens(end);
+            let span = lo.to(self.prev_token.span);
+
+            let num_extra_brackets = number_of_gt + number_of_shr * 2;
+            return Some(self.dcx().emit_err(UnmatchedAngleBrackets { span, num_extra_brackets }));
+        }
+        None
+    }
+
+    /// Check if a method call with an intended turbofish has been written without surrounding
+    /// angle brackets.
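+    /// For example: `x.collect::Vec<_>()`, where `x.collect::<Vec<_>>()` was intended.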
+    pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut PathSegment) {
+        if !self.may_recover() {
+            return;
+        }
+
+        if self.token == token::PathSep && segment.args.is_none() {
+            let snapshot = self.create_snapshot_for_diagnostic();
+            self.bump();
+            let lo = self.token.span;
+            match self.parse_angle_args(None) {
+                Ok(args) => {
+                    let span = lo.to(self.prev_token.span);
+                    // Detect trailing `>` like in `x.collect::Vec<_>>()`.
+                    let mut trailing_span = self.prev_token.span.shrink_to_hi();
+                    while self.token == token::Shr || self.token == token::Gt {
+                        trailing_span = trailing_span.to(self.token.span);
+                        self.bump();
+                    }
+                    if self.token == token::OpenParen {
+                        // Recover from bad turbofish: `foo.collect::Vec<_>()`.
+                        segment.args = Some(AngleBracketedArgs { args, span }.into());
+
+                        self.dcx().emit_err(GenericParamsWithoutAngleBrackets {
+                            span,
+                            sugg: GenericParamsWithoutAngleBracketsSugg {
+                                left: span.shrink_to_lo(),
+                                right: trailing_span,
+                            },
+                        });
+                    } else {
+                        // This doesn't look like an invalid turbofish, can't recover parse state.
+                        self.restore_snapshot(snapshot);
+                    }
+                }
+                Err(err) => {
+                    // We couldn't parse generic parameters, unlikely to be a turbofish. Rely on
+                    // generic parse error instead.
+                    err.cancel();
+                    self.restore_snapshot(snapshot);
+                }
+            }
+        }
+    }
+
+    /// When a turbofish with multiple type parameters is written without the leading `::`,
+    /// we hit a parse error at the first `,`.
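+    /// For example: `foo<u32, i32>()`, which should be written as `foo::<u32, i32>()`.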
+    pub(super) fn check_mistyped_turbofish_with_multiple_type_params(
+        &mut self,
+        mut e: Diag<'a>,
+        expr: &mut P<Expr>,
+    ) -> PResult<'a, ErrorGuaranteed> {
+        if let ExprKind::Binary(binop, _, _) = &expr.kind
+            && let ast::BinOpKind::Lt = binop.node
+            && self.eat(exp!(Comma))
+        {
+            let x = self.parse_seq_to_before_end(
+                exp!(Gt),
+                SeqSep::trailing_allowed(exp!(Comma)),
+                |p| match p.parse_generic_arg(None)? {
+                    Some(arg) => Ok(arg),
+                    // If we didn't eat a generic arg, then we should error.
+                    None => p.unexpected_any(),
+                },
+            );
+            match x {
+                Ok((_, _, Recovered::No)) => {
+                    if self.eat(exp!(Gt)) {
+                        // We made sense of it. Improve the error message.
+                        e.span_suggestion_verbose(
+                            binop.span.shrink_to_lo(),
+                            fluent::parse_sugg_turbofish_syntax,
+                            "::",
+                            Applicability::MaybeIncorrect,
+                        );
+                        match self.parse_expr() {
+                            Ok(_) => {
+                                // The subsequent expression is valid. Mark
+                                // `expr` as erroneous and emit `e` now, but
+                                // return `Ok` so parsing can continue.
+                                let guar = e.emit();
+                                *expr = self.mk_expr_err(expr.span.to(self.prev_token.span), guar);
+                                return Ok(guar);
+                            }
+                            Err(err) => {
+                                err.cancel();
+                            }
+                        }
+                    }
+                }
+                Ok((_, _, Recovered::Yes(_))) => {}
+                Err(err) => {
+                    err.cancel();
+                }
+            }
+        }
+        Err(e)
+    }
+
+    /// Suggests adding the missing `let` before the identifier in a statement:
+    /// `a: Ty = 1` -> `let a: Ty = 1`
+    pub(super) fn suggest_add_missing_let_for_stmt(&mut self, err: &mut Diag<'a>) {
+        if self.token == token::Colon {
+            let prev_span = self.prev_token.span.shrink_to_lo();
+            let snapshot = self.create_snapshot_for_diagnostic();
+            self.bump();
+            match self.parse_ty() {
+                Ok(_) => {
+                    if self.token == token::Eq {
+                        let sugg = SuggAddMissingLetStmt { span: prev_span };
+                        sugg.add_to_diag(err);
+                    }
+                }
+                Err(e) => {
+                    e.cancel();
+                }
+            }
+            self.restore_snapshot(snapshot);
+        }
+    }
+
+    /// Check to see if a pair of chained operators looks like an attempt at chained comparison,
+    /// e.g. `1 < x <= 3`. If so, suggest either splitting the comparison into two, or
+    /// parenthesising the leftmost comparison. The return value indicates if recovery happened.
+    fn attempt_chained_comparison_suggestion(
+        &mut self,
+        err: &mut ComparisonOperatorsCannotBeChained,
+        inner_op: &Expr,
+        outer_op: &Spanned<AssocOp>,
+    ) -> bool {
+        if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
+            if let ExprKind::Field(_, ident) = l1.kind
+                && !ident.is_numeric()
+                && !matches!(r1.kind, ExprKind::Lit(_))
+            {
+                // The parser has encountered `foo.bar<baz`, the likelihood of the turbofish
+                // suggestion being the only one to apply is high.
+                return false;
+            }
+            return match (op.node, &outer_op.node) {
+                // `x == y == z`
+                (BinOpKind::Eq, AssocOp::Binary(BinOpKind::Eq)) |
+                // `x < y < z` and friends.
+                (BinOpKind::Lt, AssocOp::Binary(BinOpKind::Lt | BinOpKind::Le)) |
+                (BinOpKind::Le, AssocOp::Binary(BinOpKind::Lt | BinOpKind::Le)) |
+                // `x > y > z` and friends.
+                (BinOpKind::Gt, AssocOp::Binary(BinOpKind::Gt | BinOpKind::Ge)) |
+                (BinOpKind::Ge, AssocOp::Binary(BinOpKind::Gt | BinOpKind::Ge)) => {
+                    let expr_to_str = |e: &Expr| {
+                        self.span_to_snippet(e.span)
+                            .unwrap_or_else(|_| pprust::expr_to_string(e))
+                    };
+                    err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::SplitComparison {
+                        span: inner_op.span.shrink_to_hi(),
+                        middle_term: expr_to_str(r1),
+                    });
+                    false // Keep the current parse behavior, where the AST is `(x < y) < z`.
+                }
+                // `x == y < z`
+                (
+                    BinOpKind::Eq,
+                    AssocOp::Binary(BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge)
+                ) => {
+                    // Consume `z`/outer-op-rhs.
+                    let snapshot = self.create_snapshot_for_diagnostic();
+                    match self.parse_expr() {
+                        Ok(r2) => {
+                            // We are sure that outer-op-rhs could be consumed, the suggestion is
+                            // likely correct.
+                            err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::Parenthesize {
+                                left: r1.span.shrink_to_lo(),
+                                right: r2.span.shrink_to_hi(),
+                            });
+                            true
+                        }
+                        Err(expr_err) => {
+                            expr_err.cancel();
+                            self.restore_snapshot(snapshot);
+                            true
+                        }
+                    }
+                }
+                // `x > y == z`
+                (
+                    BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge,
+                    AssocOp::Binary(BinOpKind::Eq)
+                ) => {
+                    let snapshot = self.create_snapshot_for_diagnostic();
+                    // At this point it is always valid to enclose the lhs in parentheses, no
+                    // further checks are necessary.
+                    match self.parse_expr() {
+                        Ok(_) => {
+                            err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::Parenthesize {
+                                left: l1.span.shrink_to_lo(),
+                                right: r1.span.shrink_to_hi(),
+                            });
+                            true
+                        }
+                        Err(expr_err) => {
+                            expr_err.cancel();
+                            self.restore_snapshot(snapshot);
+                            false
+                        }
+                    }
+                }
+                _ => false
+            };
+        }
+        false
+    }
+
+    /// Produces an error if comparison operators are chained (RFC #558).
+    /// We only need to check the LHS, not the RHS, because all comparison ops have the same
+    /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`).
+    ///
+    /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used
+    /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the
+    /// case.
+    ///
+    /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left
+    /// associative we can infer that we have:
+    ///
+    /// ```text
+    ///           outer_op
+    ///           /   \
+    ///     inner_op   r2
+    ///        /  \
+    ///      l1    r1
+    /// ```
+    pub(super) fn check_no_chained_comparison(
+        &mut self,
+        inner_op: &Expr,
+        outer_op: &Spanned<AssocOp>,
+    ) -> PResult<'a, Option<P<Expr>>> {
+        debug_assert!(
+            outer_op.node.is_comparison(),
+            "check_no_chained_comparison: {:?} is not comparison",
+            outer_op.node,
+        );
+
+        let mk_err_expr =
+            |this: &Self, span, guar| Ok(Some(this.mk_expr(span, ExprKind::Err(guar))));
+
+        match &inner_op.kind {
+            ExprKind::Binary(op, l1, r1) if op.node.is_comparison() => {
+                let mut err = ComparisonOperatorsCannotBeChained {
+                    span: vec![op.span, self.prev_token.span],
+                    suggest_turbofish: None,
+                    help_turbofish: false,
+                    chaining_sugg: None,
+                };
+
+                // Include `<` to provide this recommendation even in a case like
+                // `Foo<Bar<Baz<Qux, ()>>>`
+                if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Binary(BinOpKind::Lt)
+                    || outer_op.node == AssocOp::Binary(BinOpKind::Gt)
+                {
+                    if outer_op.node == AssocOp::Binary(BinOpKind::Lt) {
+                        let snapshot = self.create_snapshot_for_diagnostic();
+                        self.bump();
+                        // So far we have parsed `foo<bar<`, consume the rest of the type args.
+                        let modifiers = [(token::Lt, 1), (token::Gt, -1), (token::Shr, -2)];
+                        self.consume_tts(1, &modifiers);
+
+                        if !matches!(self.token.kind, token::OpenParen | token::PathSep) {
+                            // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
+                            // parser and bail out.
+                            self.restore_snapshot(snapshot);
+                        }
+                    }
+                    return if self.token == token::PathSep {
+                        // We have some certainty that this was a bad turbofish at this point.
+                        // `foo< bar >::`
+                        if let ExprKind::Binary(o, ..) = inner_op.kind
+                            && o.node == BinOpKind::Lt
+                        {
+                            err.suggest_turbofish = Some(op.span.shrink_to_lo());
+                        } else {
+                            err.help_turbofish = true;
+                        }
+
+                        let snapshot = self.create_snapshot_for_diagnostic();
+                        self.bump(); // `::`
+
+                        // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`.
+                        match self.parse_expr() {
+                            Ok(_) => {
+                                // 99% certain that the suggestion is correct, continue parsing.
+                                let guar = self.dcx().emit_err(err);
+                                // FIXME: actually check that the two expressions in the binop are
+                                // paths and resynthesize new fn call expression instead of using
+                                // `ExprKind::Err` placeholder.
+                                mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
+                            }
+                            Err(expr_err) => {
+                                expr_err.cancel();
+                                // Not entirely sure now, but we bubble the error up with the
+                                // suggestion.
+                                self.restore_snapshot(snapshot);
+                                Err(self.dcx().create_err(err))
+                            }
+                        }
+                    } else if self.token == token::OpenParen {
+                        // We have high certainty that this was a bad turbofish at this point.
+                        // `foo< bar >(`
+                        if let ExprKind::Binary(o, ..) = inner_op.kind
+                            && o.node == BinOpKind::Lt
+                        {
+                            err.suggest_turbofish = Some(op.span.shrink_to_lo());
+                        } else {
+                            err.help_turbofish = true;
+                        }
+                        // Consume the fn call arguments.
+                        match self.consume_fn_args() {
+                            Err(()) => Err(self.dcx().create_err(err)),
+                            Ok(()) => {
+                                let guar = self.dcx().emit_err(err);
+                                // FIXME: actually check that the two expressions in the binop are
+                                // paths and resynthesize new fn call expression instead of using
+                                // `ExprKind::Err` placeholder.
+                                mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
+                            }
+                        }
+                    } else {
+                        if !matches!(l1.kind, ExprKind::Lit(_))
+                            && !matches!(r1.kind, ExprKind::Lit(_))
+                        {
+                            // All we know is that this is `foo < bar >` and *nothing* else. Try to
+                            // be helpful, but don't attempt to recover.
+                            err.help_turbofish = true;
+                        }
+
+                        // If it looks like a genuine attempt to chain operators (as opposed to a
+                        // misformatted turbofish, for instance), suggest a correct form.
+                        let recovered = self
+                            .attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
+                        if recovered {
+                            let guar = self.dcx().emit_err(err);
+                            mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
+                        } else {
+                            // These cases cause too many knock-down errors, bail out (#61329).
+                            Err(self.dcx().create_err(err))
+                        }
+                    };
+                }
+                let recovered =
+                    self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
+                let guar = self.dcx().emit_err(err);
+                if recovered {
+                    return mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar);
+                }
+            }
+            _ => {}
+        }
+        Ok(None)
+    }
+
+    fn consume_fn_args(&mut self) -> Result<(), ()> {
+        let snapshot = self.create_snapshot_for_diagnostic();
+        self.bump(); // `(`
+
+        // Consume the fn call arguments.
+        let modifiers = [(token::OpenParen, 1), (token::CloseParen, -1)];
+        self.consume_tts(1, &modifiers);
+
+        if self.token == token::Eof {
+            // Not entirely sure that what we consumed were fn arguments, rollback.
+            self.restore_snapshot(snapshot);
+            Err(())
+        } else {
+            // 99% certain that the suggestion is correct, continue parsing.
+            Ok(())
+        }
+    }
+
+    pub(super) fn maybe_report_ambiguous_plus(&mut self, impl_dyn_multi: bool, ty: &Ty) {
+        if impl_dyn_multi {
+            self.dcx().emit_err(AmbiguousPlus {
+                span: ty.span,
+                suggestion: AddParen { lo: ty.span.shrink_to_lo(), hi: ty.span.shrink_to_hi() },
+            });
+        }
+    }
+
+    /// Swift lets users write `Ty?` to mean `Option<Ty>`. Parse the construct and recover from it.
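+    /// For example: `let x: i32? = None;`, where `Option<i32>` was presumably intended.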
+    pub(super) fn maybe_recover_from_question_mark(&mut self, ty: P<Ty>) -> P<Ty> {
+        if self.token == token::Question {
+            self.bump();
+            let guar = self.dcx().emit_err(QuestionMarkInType {
+                span: self.prev_token.span,
+                sugg: QuestionMarkInTypeSugg {
+                    left: ty.span.shrink_to_lo(),
+                    right: self.prev_token.span,
+                },
+            });
+            self.mk_ty(ty.span.to(self.prev_token.span), TyKind::Err(guar))
+        } else {
+            ty
+        }
+    }
+
+    /// Rust has no ternary operator (`cond ? then : else`). Parse it and try
+    /// to recover from it if `then` and `else` are valid expressions. Returns
+    /// an err if this appears to be a ternary expression.
+    /// If we have the span of the condition, we can provide a better error span
+    /// and code suggestion.
+    pub(super) fn maybe_recover_from_ternary_operator(
+        &mut self,
+        cond: Option<Span>,
+    ) -> PResult<'a, ()> {
+        if self.prev_token != token::Question {
+            return PResult::Ok(());
+        }
+
+        let question = self.prev_token.span;
+        let lo = cond.unwrap_or(question).lo();
+        let snapshot = self.create_snapshot_for_diagnostic();
+
+        if match self.parse_expr() {
+            Ok(_) => true,
+            Err(err) => {
+                err.cancel();
+                // The colon can sometimes be mistaken for type
+                // ascription. Catch when this happens and continue.
+                self.token == token::Colon
+            }
+        } {
+            if self.eat_noexpect(&token::Colon) {
+                let colon = self.prev_token.span;
+                match self.parse_expr() {
+                    Ok(expr) => {
+                        let sugg = cond.map(|cond| TernaryOperatorSuggestion {
+                            before_cond: cond.shrink_to_lo(),
+                            question,
+                            colon,
+                            end: expr.span.shrink_to_hi(),
+                        });
+                        return Err(self.dcx().create_err(TernaryOperator {
+                            span: self.prev_token.span.with_lo(lo),
+                            sugg,
+                            no_sugg: sugg.is_none(),
+                        }));
+                    }
+                    Err(err) => {
+                        err.cancel();
+                    }
+                };
+            }
+        }
+        self.restore_snapshot(snapshot);
+        Ok(())
+    }
+
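+    /// Recovers from types like `&Foo + Send`, suggesting the parenthesized form
+    /// `&(Foo + Send)`.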
+    pub(super) fn maybe_recover_from_bad_type_plus(&mut self, ty: &Ty) -> PResult<'a, ()> {
+        // Do not add `+` to expected tokens.
+        if !self.token.is_like_plus() {
+            return Ok(());
+        }
+
+        self.bump(); // `+`
+        let _bounds = self.parse_generic_bounds()?;
+        let sub = match &ty.kind {
+            TyKind::Ref(_lifetime, mut_ty) => {
+                let lo = mut_ty.ty.span.shrink_to_lo();
+                let hi = self.prev_token.span.shrink_to_hi();
+                BadTypePlusSub::AddParen { suggestion: AddParen { lo, hi } }
+            }
+            TyKind::Ptr(..) | TyKind::FnPtr(..) => {
+                BadTypePlusSub::ForgotParen { span: ty.span.to(self.prev_token.span) }
+            }
+            _ => BadTypePlusSub::ExpectPath { span: ty.span },
+        };
+
+        self.dcx().emit_err(BadTypePlus { span: ty.span, sub });
+
+        Ok(())
+    }
+
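+    /// Recovers from a prefix increment such as `++x`; the error suggests using `x += 1`
+    /// instead.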
+    pub(super) fn recover_from_prefix_increment(
+        &mut self,
+        operand_expr: P<Expr>,
+        op_span: Span,
+        start_stmt: bool,
+    ) -> PResult<'a, P<Expr>> {
+        let standalone = if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr };
+        let kind = IncDecRecovery { standalone, op: IncOrDec::Inc, fixity: UnaryFixity::Pre };
+        self.recover_from_inc_dec(operand_expr, kind, op_span)
+    }
+
+    pub(super) fn recover_from_postfix_increment(
+        &mut self,
+        operand_expr: P<Expr>,
+        op_span: Span,
+        start_stmt: bool,
+    ) -> PResult<'a, P<Expr>> {
+        let kind = IncDecRecovery {
+            standalone: if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr },
+            op: IncOrDec::Inc,
+            fixity: UnaryFixity::Post,
+        };
+        self.recover_from_inc_dec(operand_expr, kind, op_span)
+    }
+
+    pub(super) fn recover_from_postfix_decrement(
+        &mut self,
+        operand_expr: P<Expr>,
+        op_span: Span,
+        start_stmt: bool,
+    ) -> PResult<'a, P<Expr>> {
+        let kind = IncDecRecovery {
+            standalone: if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr },
+            op: IncOrDec::Dec,
+            fixity: UnaryFixity::Post,
+        };
+        self.recover_from_inc_dec(operand_expr, kind, op_span)
+    }
+
+    fn recover_from_inc_dec(
+        &mut self,
+        base: P<Expr>,
+        kind: IncDecRecovery,
+        op_span: Span,
+    ) -> PResult<'a, P<Expr>> {
+        let mut err = self.dcx().struct_span_err(
+            op_span,
+            format!("Rust has no {} {} operator", kind.fixity, kind.op.name()),
+        );
+        err.span_label(op_span, format!("not a valid {} operator", kind.fixity));
+
+        let help_base_case = |mut err: Diag<'_, _>, base| {
+            err.help(format!("use `{}= 1` instead", kind.op.chr()));
+            err.emit();
+            Ok(base)
+        };
+
+        // (pre, post)
+        let spans = match kind.fixity {
+            UnaryFixity::Pre => (op_span, base.span.shrink_to_hi()),
+            UnaryFixity::Post => (base.span.shrink_to_lo(), op_span),
+        };
+
+        match kind.standalone {
+            IsStandalone::Standalone => {
+                self.inc_dec_standalone_suggest(kind, spans).emit_verbose(&mut err)
+            }
+            IsStandalone::Subexpr => {
+                let Ok(base_src) = self.span_to_snippet(base.span) else {
+                    return help_base_case(err, base);
+                };
+                match kind.fixity {
+                    UnaryFixity::Pre => {
+                        self.prefix_inc_dec_suggest(base_src, kind, spans).emit(&mut err)
+                    }
+                    UnaryFixity::Post => {
+                        // Won't suggest, since we cannot handle the precedence here;
+                        // for example, `a + b++` has already been parsed as `(a + b)++`,
+                        // so no suggestion can be made.
+                        if !matches!(base.kind, ExprKind::Binary(_, _, _)) {
+                            self.postfix_inc_dec_suggest(base_src, kind, spans).emit(&mut err)
+                        }
+                    }
+                }
+            }
+        }
+        Err(err)
+    }
+
+    fn prefix_inc_dec_suggest(
+        &mut self,
+        base_src: String,
+        kind: IncDecRecovery,
+        (pre_span, post_span): (Span, Span),
+    ) -> MultiSugg {
+        MultiSugg {
+            msg: format!("use `{}= 1` instead", kind.op.chr()),
+            patches: vec![
+                (pre_span, "{ ".to_string()),
+                (post_span, format!(" {}= 1; {} }}", kind.op.chr(), base_src)),
+            ],
+            applicability: Applicability::MachineApplicable,
+        }
+    }
+
+    fn postfix_inc_dec_suggest(
+        &mut self,
+        base_src: String,
+        kind: IncDecRecovery,
+        (pre_span, post_span): (Span, Span),
+    ) -> MultiSugg {
+        let tmp_var = if base_src.trim() == "tmp" { "tmp_" } else { "tmp" };
+        MultiSugg {
+            msg: format!("use `{}= 1` instead", kind.op.chr()),
+            patches: vec![
+                (pre_span, format!("{{ let {tmp_var} = ")),
+                (post_span, format!("; {} {}= 1; {} }}", base_src, kind.op.chr(), tmp_var)),
+            ],
+            applicability: Applicability::HasPlaceholders,
+        }
+    }
+
+    fn inc_dec_standalone_suggest(
+        &mut self,
+        kind: IncDecRecovery,
+        (pre_span, post_span): (Span, Span),
+    ) -> MultiSugg {
+        let mut patches = Vec::new();
+
+        if !pre_span.is_empty() {
+            patches.push((pre_span, String::new()));
+        }
+
+        patches.push((post_span, format!(" {}= 1", kind.op.chr())));
+        MultiSugg {
+            msg: format!("use `{}= 1` instead", kind.op.chr()),
+            patches,
+            applicability: Applicability::MachineApplicable,
+        }
+    }
+
+    /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
+    /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
+    /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
+    pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
+        &mut self,
+        base: P<T>,
+    ) -> PResult<'a, P<T>> {
+        if !self.may_recover() {
+            return Ok(base);
+        }
+
+        // Do not add `::` to expected tokens.
+        if self.token == token::PathSep {
+            if let Some(ty) = base.to_ty() {
+                return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
+            }
+        }
+        Ok(base)
+    }
+
+    /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
+    /// combines them into a `<Ty>::AssocItem` expression/pattern/type.
+    pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
+        &mut self,
+        ty_span: Span,
+        ty: P<Ty>,
+    ) -> PResult<'a, P<T>> {
+        self.expect(exp!(PathSep))?;
+
+        let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
+        self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
+        path.span = ty_span.to(self.prev_token.span);
+
+        self.dcx().emit_err(BadQPathStage2 {
+            span: ty_span,
+            wrap: WrapType { lo: ty_span.shrink_to_lo(), hi: ty_span.shrink_to_hi() },
+        });
+
+        let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
+        Ok(P(T::recovered(Some(P(QSelf { ty, path_span, position: 0 })), path)))
+    }
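+
+    // Concrete sketch of the recovery above (hypothetical user code; `buf` is assumed
+    // to be a `&[u8]`): the base type gets wrapped in angle brackets so that the path
+    // parses as a qualified path.
+    //
+    //     let n = [u8]::len(buf);      // written
+    //     let n = <[u8]>::len(buf);    // recovered `<Ty>::AssocItem` form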
+
+    /// This function gets called in places where a semicolon is NOT expected. If a semicolon
+    /// is found, it emits the appropriate error and returns `true`.
+    pub fn maybe_consume_incorrect_semicolon(&mut self, previous_item: Option<&Item>) -> bool {
+        if self.token != TokenKind::Semi {
+            return false;
+        }
+
+        // Check previous item to add it to the diagnostic, for example to say
+        // `enum declarations are not followed by a semicolon`
+        let err = match previous_item {
+            Some(previous_item) => {
+                let name = match previous_item.kind {
+                    // Say "braced struct" because tuple-structs and
+                    // braceless-empty-struct declarations do take a semicolon.
+                    ItemKind::Struct(..) => "braced struct",
+                    _ => previous_item.kind.descr(),
+                };
+                IncorrectSemicolon { span: self.token.span, name, show_help: true }
+            }
+            None => IncorrectSemicolon { span: self.token.span, name: "", show_help: false },
+        };
+        self.dcx().emit_err(err);
+
+        self.bump();
+        true
+    }
+
+    /// Creates a `Diag` for an unexpected token `t` and tries to recover if it is a
+    /// closing delimiter.
+    pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
+        let token_str = pprust::token_kind_to_string(t);
+        let this_token_str = super::token_descr(&self.token);
+        let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
+            // Point at the end of the macro call when reaching end of macro arguments.
+            (token::Eof, Some(_)) => {
+                let sp = self.prev_token.span.shrink_to_hi();
+                (sp, sp)
+            }
+            // We don't want to point at the following span after DUMMY_SP.
+            // This happens when the parser finds an empty TokenStream.
+            _ if self.prev_token.span == DUMMY_SP => (self.token.span, self.token.span),
+            // EOF, don't want to point at the following char, but rather the last token.
+            (token::Eof, None) => (self.prev_token.span, self.token.span),
+            _ => (self.prev_token.span.shrink_to_hi(), self.token.span),
+        };
+        let msg = format!(
+            "expected `{}`, found {}",
+            token_str,
+            match (&self.token.kind, self.subparser_name) {
+                (token::Eof, Some(origin)) => format!("end of {origin}"),
+                _ => this_token_str,
+            },
+        );
+        let mut err = self.dcx().struct_span_err(sp, msg);
+        let label_exp = format!("expected `{token_str}`");
+        let sm = self.psess.source_map();
+        if !sm.is_multiline(prev_sp.until(sp)) {
+            // When the spans are on the same line, the only content between them is
+            // whitespace, so point only at the found token.
+            err.span_label(sp, label_exp);
+        } else {
+            err.span_label(prev_sp, label_exp);
+            err.span_label(sp, "unexpected token");
+        }
+        Err(err)
+    }
+
+    pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
+        if self.eat(exp!(Semi)) || self.recover_colon_as_semi() {
+            return Ok(());
+        }
+        self.expect(exp!(Semi)).map(drop) // Error unconditionally
+    }
+
+    pub(super) fn recover_colon_as_semi(&mut self) -> bool {
+        let line_idx = |span: Span| {
+            self.psess
+                .source_map()
+                .span_to_lines(span)
+                .ok()
+                .and_then(|lines| Some(lines.lines.get(0)?.line_index))
+        };
+
+        if self.may_recover()
+            && self.token == token::Colon
+            && self.look_ahead(1, |next| line_idx(self.token.span) < line_idx(next.span))
+        {
+            self.dcx().emit_err(ColonAsSemi { span: self.token.span });
+            self.bump();
+            return true;
+        }
+
+        false
+    }
+
+    /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
+    /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
+    pub(super) fn recover_incorrect_await_syntax(
+        &mut self,
+        await_sp: Span,
+    ) -> PResult<'a, P<Expr>> {
+        let (hi, expr, is_question) = if self.token == token::Bang {
+            // Handle `await!(<expr>)`.
+            self.recover_await_macro()?
+        } else {
+            self.recover_await_prefix(await_sp)?
+        };
+        let (sp, guar) = self.error_on_incorrect_await(await_sp, hi, &expr, is_question);
+        let expr = self.mk_expr_err(await_sp.to(sp), guar);
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
+        self.expect(exp!(Bang))?;
+        self.expect(exp!(OpenParen))?;
+        let expr = self.parse_expr()?;
+        self.expect(exp!(CloseParen))?;
+        Ok((self.prev_token.span, expr, false))
+    }
+
+    fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
+        let is_question = self.eat(exp!(Question)); // Handle `await? <expr>`.
+        let expr = if self.token == token::OpenBrace {
+            // Handle `await { <expr> }`.
+            // This needs to be handled separately from the next arm to avoid
+            // interpreting `await { <expr> }?` as `<expr>?.await`.
+            self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)
+        } else {
+            self.parse_expr()
+        }
+        .map_err(|mut err| {
+            err.span_label(await_sp, "while parsing this incorrect await expression");
+            err
+        })?;
+        Ok((expr.span, expr, is_question))
+    }
+
+    fn error_on_incorrect_await(
+        &self,
+        lo: Span,
+        hi: Span,
+        expr: &Expr,
+        is_question: bool,
+    ) -> (Span, ErrorGuaranteed) {
+        let span = lo.to(hi);
+        let guar = self.dcx().emit_err(IncorrectAwait {
+            span,
+            suggestion: AwaitSuggestion {
+                removal: lo.until(expr.span),
+                dot_await: expr.span.shrink_to_hi(),
+                question_mark: if is_question { "?" } else { "" },
+            },
+        });
+        (span, guar)
+    }
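+
+    // Sketch of the incorrect `await` forms recovered above and the `.await` syntax they
+    // are steered towards (hypothetical user code; `fut` is assumed to be a future):
+    //
+    //     await fut;       ->  fut.await;
+    //     await? fut;      ->  fut.await?;
+    //     await!(fut);     // macro-style form, also recovered
+    //     await { fut };   // block form, also recovered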
+
+    /// If encountering `future.await()`, consumes and emits an error.
+    pub(super) fn recover_from_await_method_call(&mut self) {
+        if self.token == token::OpenParen && self.look_ahead(1, |t| t == &token::CloseParen) {
+            // future.await()
+            let lo = self.token.span;
+            self.bump(); // (
+            let span = lo.to(self.token.span);
+            self.bump(); // )
+
+            self.dcx().emit_err(IncorrectUseOfAwait { span });
+        }
+    }
+
+    /// If encountering `x.use()`, consumes and emits an error.
+    pub(super) fn recover_from_use(&mut self) {
+        if self.token == token::OpenParen && self.look_ahead(1, |t| t == &token::CloseParen) {
+            // var.use()
+            let lo = self.token.span;
+            self.bump(); // (
+            let span = lo.to(self.token.span);
+            self.bump(); // )
+
+            self.dcx().emit_err(IncorrectUseOfUse { span });
+        }
+    }
+
+    pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
+        let is_try = self.token.is_keyword(kw::Try);
+        let is_bang = self.look_ahead(1, |t| t == &token::Bang); // check for `!`
+        let is_open = self.look_ahead(2, |t| t == &token::OpenParen); // check for `(`
+
+        if is_try && is_bang && is_open {
+            let lo = self.token.span;
+            self.bump(); // remove `try`
+            self.bump(); // remove `!`
+            let try_span = lo.to(self.token.span); // the span of `try!(`
+            self.bump(); // remove `(`
+            let is_empty = self.token == token::CloseParen; // check if the block is empty
+            self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::No); // eat the block
+            let hi = self.token.span;
+            self.bump(); // remove `)`
+            let mut err = self.dcx().struct_span_err(lo.to(hi), "use of deprecated `try` macro");
+            err.note("in the 2018 edition `try` is a reserved keyword, and the `try!()` macro is deprecated");
+            let prefix = if is_empty { "" } else { "alternatively, " };
+            if !is_empty {
+                err.multipart_suggestion(
+                    "you can use the `?` operator instead",
+                    vec![(try_span, "".to_owned()), (hi, "?".to_owned())],
+                    Applicability::MachineApplicable,
+                );
+            }
+            err.span_suggestion(
+                lo.shrink_to_lo(),
+                format!(
+                    "{prefix}you can still access the deprecated `try!()` macro using the \
+                     \"raw identifier\" syntax"
+                ),
+                "r#",
+                Applicability::MachineApplicable,
+            );
+            let guar = err.emit();
+            Ok(self.mk_expr_err(lo.to(hi), guar))
+        } else {
+            Err(self.expected_expression_found()) // The user isn't trying to invoke the try! macro
+        }
+    }
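+
+    // Sketch of the deprecated-`try!` recovery above (hypothetical 2018-edition user code):
+    //
+    //     let v = try!(parse());       // error: `try` is a reserved keyword in 2018
+    //     let v = parse()?;            // suggested `?` operator form
+    //     let v = r#try!(parse());     // alternative: raw-identifier invocation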
+
+    /// When trying to close a generics list and encountering code like
+    /// ```text
+    /// impl<S: Into<std::borrow::Cow<'static, str>> From<S> for Canonical {}
+    ///                                          // ^ missing > here
+    /// ```
+    /// we provide a structured suggestion on the error from `expect_gt`.
+    pub(super) fn expect_gt_or_maybe_suggest_closing_generics(
+        &mut self,
+        params: &[ast::GenericParam],
+    ) -> PResult<'a, ()> {
+        let Err(mut err) = self.expect_gt() else {
+            return Ok(());
+        };
+        // Attempt to find places where a missing `>` might belong.
+        if let [.., ast::GenericParam { bounds, .. }] = params
+            && let Some(poly) = bounds
+                .iter()
+                .filter_map(|bound| match bound {
+                    ast::GenericBound::Trait(poly) => Some(poly),
+                    _ => None,
+                })
+                .next_back()
+        {
+            err.span_suggestion_verbose(
+                poly.span.shrink_to_hi(),
+                "you might have meant to end the type parameters here",
+                ">",
+                Applicability::MaybeIncorrect,
+            );
+        }
+        Err(err)
+    }
+
+    pub(super) fn recover_seq_parse_error(
+        &mut self,
+        open: ExpTokenPair<'_>,
+        close: ExpTokenPair<'_>,
+        lo: Span,
+        err: Diag<'a>,
+    ) -> P<Expr> {
+        let guar = err.emit();
+        // Recover from parse error, callers expect the closing delim to be consumed.
+        self.consume_block(open, close, ConsumeClosingDelim::Yes);
+        self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err(guar))
+    }
+
+    /// Eats tokens until we can be relatively sure we reached the end of the
+    /// statement. This is something of a best-effort heuristic.
+    ///
+    /// We terminate when we find an unmatched `}` (without consuming it).
+    pub(super) fn recover_stmt(&mut self) {
+        self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
+    }
+
+    /// If `break_on_semi` is `Break`, then we will stop consuming tokens after
+    /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
+    /// approximate -- it can mean we break too early due to macros, but that
+    /// should only lead to sub-optimal recovery, not inaccurate parsing).
+    ///
+    /// If `break_on_block` is `Break`, then we will stop consuming tokens
+    /// after finding (and consuming) a brace-delimited block.
+    pub(super) fn recover_stmt_(
+        &mut self,
+        break_on_semi: SemiColonMode,
+        break_on_block: BlockMode,
+    ) {
+        let mut brace_depth = 0;
+        let mut bracket_depth = 0;
+        let mut in_block = false;
+        debug!("recover_stmt_ enter loop (semi={:?}, block={:?})", break_on_semi, break_on_block);
+        loop {
+            debug!("recover_stmt_ loop {:?}", self.token);
+            match self.token.kind {
+                token::OpenBrace => {
+                    brace_depth += 1;
+                    self.bump();
+                    if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
+                    {
+                        in_block = true;
+                    }
+                }
+                token::OpenBracket => {
+                    bracket_depth += 1;
+                    self.bump();
+                }
+                token::CloseBrace => {
+                    if brace_depth == 0 {
+                        debug!("recover_stmt_ return - close delim {:?}", self.token);
+                        break;
+                    }
+                    brace_depth -= 1;
+                    self.bump();
+                    if in_block && bracket_depth == 0 && brace_depth == 0 {
+                        debug!("recover_stmt_ return - block end {:?}", self.token);
+                        break;
+                    }
+                }
+                token::CloseBracket => {
+                    bracket_depth -= 1;
+                    if bracket_depth < 0 {
+                        bracket_depth = 0;
+                    }
+                    self.bump();
+                }
+                token::Eof => {
+                    debug!("recover_stmt_ return - Eof");
+                    break;
+                }
+                token::Semi => {
+                    self.bump();
+                    if break_on_semi == SemiColonMode::Break
+                        && brace_depth == 0
+                        && bracket_depth == 0
+                    {
+                        debug!("recover_stmt_ return - Semi");
+                        break;
+                    }
+                }
+                token::Comma
+                    if break_on_semi == SemiColonMode::Comma
+                        && brace_depth == 0
+                        && bracket_depth == 0 =>
+                {
+                    break;
+                }
+                _ => self.bump(),
+            }
+        }
+    }
+
+    pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
+        if self.eat_keyword(exp!(In)) {
+            // a common typo: `for _ in in bar {}`
+            self.dcx().emit_err(InInTypo {
+                span: self.prev_token.span,
+                sugg_span: in_span.until(self.prev_token.span),
+            });
+        }
+    }
+
+    pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
+        if let token::DocComment(..) = self.token.kind {
+            self.dcx().emit_err(DocCommentOnParamType { span: self.token.span });
+            self.bump();
+        } else if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) {
+            let lo = self.token.span;
+            // Skip every token until next possible arg.
+            while self.token != token::CloseBracket {
+                self.bump();
+            }
+            let sp = lo.to(self.token.span);
+            self.bump();
+            self.dcx().emit_err(AttributeOnParamType { span: sp });
+        }
+    }
+
+    pub(super) fn parameter_without_type(
+        &mut self,
+        err: &mut Diag<'_>,
+        pat: P<ast::Pat>,
+        require_name: bool,
+        first_param: bool,
+    ) -> Option<Ident> {
+        // If we find a pattern followed by an identifier, it could be an (incorrect)
+        // C-style parameter declaration.
+        if self.check_ident()
+            && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseParen)
+        {
+            // `fn foo(String s) {}`
+            let ident = self.parse_ident().unwrap();
+            let span = pat.span.with_hi(ident.span.hi());
+
+            err.span_suggestion(
+                span,
+                "declare the type after the parameter binding",
+                "<identifier>: <type>",
+                Applicability::HasPlaceholders,
+            );
+            return Some(ident);
+        } else if require_name
+            && (self.token == token::Comma
+                || self.token == token::Lt
+                || self.token == token::CloseParen)
+        {
+            let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)";
+
+            let (ident, self_sugg, param_sugg, type_sugg, self_span, param_span, type_span) =
+                match pat.kind {
+                    PatKind::Ident(_, ident, _) => (
+                        ident,
+                        "self: ",
+                        ": TypeName".to_string(),
+                        "_: ",
+                        pat.span.shrink_to_lo(),
+                        pat.span.shrink_to_hi(),
+                        pat.span.shrink_to_lo(),
+                    ),
+                    // Also catches `fn foo(&a)`.
+                    PatKind::Ref(ref inner_pat, mutab)
+                        if let PatKind::Ident(_, ident, _) = inner_pat.clone().kind =>
+                    {
+                        let mutab = mutab.prefix_str();
+                        (
+                            ident,
+                            "self: ",
+                            format!("{ident}: &{mutab}TypeName"),
+                            "_: ",
+                            pat.span.shrink_to_lo(),
+                            pat.span,
+                            pat.span.shrink_to_lo(),
+                        )
+                    }
+                    _ => {
+                        // Otherwise, try to get a type and emit a suggestion.
+                        if let Some(_) = pat.to_ty() {
+                            err.span_suggestion_verbose(
+                                pat.span.shrink_to_lo(),
+                                "explicitly ignore the parameter name",
+                                "_: ".to_string(),
+                                Applicability::MachineApplicable,
+                            );
+                            err.note(rfc_note);
+                        }
+
+                        return None;
+                    }
+                };
+
+            // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}`
+            if first_param {
+                err.span_suggestion_verbose(
+                    self_span,
+                    "if this is a `self` type, give it a parameter name",
+                    self_sugg,
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to
+            // `fn foo(HashMap: TypeName<u32>)`.
+            if self.token != token::Lt {
+                err.span_suggestion_verbose(
+                    param_span,
+                    "if this is a parameter name, give it a type",
+                    param_sugg,
+                    Applicability::HasPlaceholders,
+                );
+            }
+            err.span_suggestion_verbose(
+                type_span,
+                "if this is a type, explicitly ignore the parameter name",
+                type_sugg,
+                Applicability::MachineApplicable,
+            );
+            err.note(rfc_note);
+
+            // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name.
+            return if self.token == token::Lt { None } else { Some(ident) };
+        }
+        None
+    }
+
+    pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
+        let pat = self.parse_pat_no_top_alt(Some(Expected::ArgumentName), None)?;
+        self.expect(exp!(Colon))?;
+        let ty = self.parse_ty()?;
+
+        self.dcx().emit_err(PatternMethodParamWithoutBody { span: pat.span });
+
+        // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
+        let pat =
+            P(Pat { kind: PatKind::Wild, span: pat.span, id: ast::DUMMY_NODE_ID, tokens: None });
+        Ok((pat, ty))
+    }
+
+    pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
+        let span = param.pat.span;
+        let guar = self.dcx().emit_err(SelfParamNotFirst { span });
+        param.ty.kind = TyKind::Err(guar);
+        Ok(param)
+    }
+
+    pub(super) fn consume_block(
+        &mut self,
+        open: ExpTokenPair<'_>,
+        close: ExpTokenPair<'_>,
+        consume_close: ConsumeClosingDelim,
+    ) {
+        let mut brace_depth = 0;
+        loop {
+            if self.eat(open) {
+                brace_depth += 1;
+            } else if self.check(close) {
+                if brace_depth == 0 {
+                    if let ConsumeClosingDelim::Yes = consume_close {
+                        // Some callers of this method expect to parse the closing delimiter
+                        // themselves, so we only advance past it when explicitly asked to
+                        // consume it.
+                        self.bump();
+                    }
+                    return;
+                } else {
+                    self.bump();
+                    brace_depth -= 1;
+                    continue;
+                }
+            } else if self.token == token::Eof {
+                return;
+            } else {
+                self.bump();
+            }
+        }
+    }
+
+    pub(super) fn expected_expression_found(&self) -> Diag<'a> {
+        let (span, msg) = match (&self.token.kind, self.subparser_name) {
+            (&token::Eof, Some(origin)) => {
+                let sp = self.prev_token.span.shrink_to_hi();
+                (sp, format!("expected expression, found end of {origin}"))
+            }
+            _ => (
+                self.token.span,
+                format!("expected expression, found {}", super::token_descr(&self.token)),
+            ),
+        };
+        let mut err = self.dcx().struct_span_err(span, msg);
+        let sp = self.psess.source_map().start_point(self.token.span);
+        if let Some(sp) = self.psess.ambiguous_block_expr_parse.borrow().get(&sp) {
+            err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
+        }
+        err.span_label(span, "expected expression");
+        err
+    }
+
+    fn consume_tts(
+        &mut self,
+        mut acc: i64, // `i64` because malformed code can have more closing delims than opening.
+        // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`.
+        modifier: &[(token::TokenKind, i64)],
+    ) {
+        while acc > 0 {
+            if let Some((_, val)) = modifier.iter().find(|(t, _)| self.token == *t) {
+                acc += *val;
+            }
+            if self.token == token::Eof {
+                break;
+            }
+            self.bump();
+        }
+    }
+
+    /// Replace duplicated recovered parameters with `_` pattern to avoid unnecessary errors.
+    ///
+    /// This is necessary because at this point we don't know whether we parsed a function with
+    /// anonymous parameters or a function with names but no types. In order to minimize
+    /// unnecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where
+    /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
+    /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
+    /// we deduplicate them to not complain about duplicated parameter names.
+    pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut ThinVec<Param>) {
+        let mut seen_inputs = FxHashSet::default();
+        for input in fn_inputs.iter_mut() {
+            let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err(_)) =
+                (&input.pat.kind, &input.ty.kind)
+            {
+                Some(*ident)
+            } else {
+                None
+            };
+            if let Some(ident) = opt_ident {
+                if seen_inputs.contains(&ident) {
+                    input.pat.kind = PatKind::Wild;
+                }
+                seen_inputs.insert(ident);
+            }
+        }
+    }
+
+    /// Handle encountering a symbol in a generic argument list that is not a `,` or `>`. In this
+    /// case, we emit an error and try to suggest enclosing a const argument in braces if it looks
+    /// like the user has forgotten them.
+    pub(super) fn handle_ambiguous_unbraced_const_arg(
+        &mut self,
+        args: &mut ThinVec<AngleBracketedArg>,
+    ) -> PResult<'a, bool> {
+        // If we haven't encountered a closing `>`, then the argument is malformed.
+        // It's likely that the user has written a const expression without enclosing it
+        // in braces, so we try to recover here.
+        let arg = args.pop().unwrap();
+        // FIXME: for some reason, using `unexpected` or `expected_one_of_not_found` here has
+        // adverse side effects on subsequent errors and seems to advance the parser.
+        // We emit this error here only so that a `const` expression can be recovered from the
+        // current parser state, even if it is followed by more arguments after a comma.
+        let mut err = self.dcx().struct_span_err(
+            self.token.span,
+            format!("expected one of `,` or `>`, found {}", super::token_descr(&self.token)),
+        );
+        err.span_label(self.token.span, "expected one of `,` or `>`");
+        match self.recover_const_arg(arg.span(), err) {
+            Ok(arg) => {
+                args.push(AngleBracketedArg::Arg(arg));
+                if self.eat(exp!(Comma)) {
+                    return Ok(true); // Continue
+                }
+            }
+            Err(err) => {
+                args.push(arg);
+                // We will emit a more generic error later.
+                err.delay_as_bug();
+            }
+        }
+        Ok(false) // Don't continue.
+    }
+
+    /// Attempt to parse a generic const argument that has not been enclosed in braces.
+    /// There are a limited number of expressions that are permitted without being enclosed
+    /// in braces:
+    /// - Literals.
+    /// - Single-segment paths (i.e. standalone generic const parameters).
+    ///
+    /// All other expressions that can be parsed will emit an error suggesting the expression be
+    /// wrapped in braces.
+    pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
+        let start = self.token.span;
+        let attrs = self.parse_outer_attributes()?;
+        let (expr, _) =
+            self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
+                err.span_label(
+                    start.shrink_to_lo(),
+                    "while parsing a const generic argument starting here",
+                );
+                err
+            })?;
+        if !self.expr_is_valid_const_arg(&expr) {
+            self.dcx().emit_err(ConstGenericWithoutBraces {
+                span: expr.span,
+                sugg: ConstGenericWithoutBracesSugg {
+                    left: expr.span.shrink_to_lo(),
+                    right: expr.span.shrink_to_hi(),
+                },
+            });
+        }
+        Ok(expr)
+    }
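+
+    // Sketch of what counts as an acceptable unbraced const argument above
+    // (hypothetical user code; `N` is assumed to be a const generic parameter in scope):
+    //
+    //     takes::<3>();          // literal: accepted as-is
+    //     takes::<N>();          // single-segment path: accepted as-is
+    //     takes::<N + 1>();      // anything else: suggest `takes::<{ N + 1 }>()`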
+
+    fn recover_const_param_decl(&mut self, ty_generics: Option<&Generics>) -> Option<GenericArg> {
+        let snapshot = self.create_snapshot_for_diagnostic();
+        let param = match self.parse_const_param(AttrVec::new()) {
+            Ok(param) => param,
+            Err(err) => {
+                err.cancel();
+                self.restore_snapshot(snapshot);
+                return None;
+            }
+        };
+
+        let ident = param.ident.to_string();
+        let sugg = match (ty_generics, self.psess.source_map().span_to_snippet(param.span())) {
+            (Some(Generics { params, span: impl_generics, .. }), Ok(snippet)) => {
+                Some(match &params[..] {
+                    [] => UnexpectedConstParamDeclarationSugg::AddParam {
+                        impl_generics: *impl_generics,
+                        incorrect_decl: param.span(),
+                        snippet,
+                        ident,
+                    },
+                    [.., generic] => UnexpectedConstParamDeclarationSugg::AppendParam {
+                        impl_generics_end: generic.span().shrink_to_hi(),
+                        incorrect_decl: param.span(),
+                        snippet,
+                        ident,
+                    },
+                })
+            }
+            _ => None,
+        };
+        let guar =
+            self.dcx().emit_err(UnexpectedConstParamDeclaration { span: param.span(), sugg });
+
+        let value = self.mk_expr_err(param.span(), guar);
+        Some(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }))
+    }
+
+    pub(super) fn recover_const_param_declaration(
+        &mut self,
+        ty_generics: Option<&Generics>,
+    ) -> PResult<'a, Option<GenericArg>> {
+        // We have to check for a few different cases.
+        if let Some(arg) = self.recover_const_param_decl(ty_generics) {
+            return Ok(Some(arg));
+        }
+
+        // We haven't consumed `const` yet.
+        let start = self.token.span;
+        self.bump(); // `const`
+
+        // Detect and recover from the old, pre-RFC2000 syntax for const generics.
+        let mut err = UnexpectedConstInGenericParam { span: start, to_remove: None };
+        if self.check_const_arg() {
+            err.to_remove = Some(start.until(self.token.span));
+            self.dcx().emit_err(err);
+            Ok(Some(GenericArg::Const(self.parse_const_arg()?)))
+        } else {
+            let after_kw_const = self.token.span;
+            self.recover_const_arg(after_kw_const, self.dcx().create_err(err)).map(Some)
+        }
+    }
+
+    /// Try to recover from possible generic const argument without `{` and `}`.
+    ///
+    /// When encountering code like `foo::< bar + 3 >` or `foo::< bar - baz >` we suggest
+    /// `foo::<{ bar + 3 }>` and `foo::<{ bar - baz }>`, respectively. We only provide a suggestion
+    /// if we think that the resulting expression would be well formed.
+    pub(super) fn recover_const_arg(
+        &mut self,
+        start: Span,
+        mut err: Diag<'a>,
+    ) -> PResult<'a, GenericArg> {
+        let is_op_or_dot = AssocOp::from_token(&self.token)
+            .and_then(|op| {
+                if let AssocOp::Binary(
+                    BinOpKind::Gt
+                    | BinOpKind::Lt
+                    | BinOpKind::Shr
+                    | BinOpKind::Ge
+                )
+                // Don't recover from `foo::<bar = baz>`, because this could be an attempt to
+                // assign a value to a defaulted generic parameter.
+                | AssocOp::Assign
+                | AssocOp::AssignOp(_) = op
+                {
+                    None
+                } else {
+                    Some(op)
+                }
+            })
+            .is_some()
+            || self.token == TokenKind::Dot;
+        // This will be true when a trait object type `Foo +` or a path to a `const fn` with
+        // type params has just been parsed.
+        let was_op = matches!(self.prev_token.kind, token::Plus | token::Shr | token::Gt);
+        if !is_op_or_dot && !was_op {
+            // We perform these checks and early return to avoid taking a snapshot unnecessarily.
+            return Err(err);
+        }
+        let snapshot = self.create_snapshot_for_diagnostic();
+        if is_op_or_dot {
+            self.bump();
+        }
+        match (|| {
+            let attrs = self.parse_outer_attributes()?;
+            self.parse_expr_res(Restrictions::CONST_EXPR, attrs)
+        })() {
+            Ok((expr, _)) => {
+                // Find a mistake like `MyTrait<Assoc == S::Assoc>`.
+                if snapshot.token == token::EqEq {
+                    err.span_suggestion(
+                        snapshot.token.span,
+                        "if you meant to use an associated type binding, replace `==` with `=`",
+                        "=",
+                        Applicability::MaybeIncorrect,
+                    );
+                    let guar = err.emit();
+                    let value = self.mk_expr_err(start.to(expr.span), guar);
+                    return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }));
+                } else if snapshot.token == token::Colon
+                    && expr.span.lo() == snapshot.token.span.hi()
+                    && matches!(expr.kind, ExprKind::Path(..))
+                {
+                    // Find a mistake like "foo::var:A".
+                    err.span_suggestion(
+                        snapshot.token.span,
+                        "write a path separator here",
+                        "::",
+                        Applicability::MaybeIncorrect,
+                    );
+                    let guar = err.emit();
+                    return Ok(GenericArg::Type(
+                        self.mk_ty(start.to(expr.span), TyKind::Err(guar)),
+                    ));
+                } else if self.token == token::Comma || self.token.kind.should_end_const_arg() {
+                    // Avoid the following output by checking that we consumed a full const arg:
+                    // help: expressions must be enclosed in braces to be used as const generic
+                    //       arguments
+                    //    |
+                    // LL |     let sr: Vec<{ (u32, _, _) = vec![] };
+                    //    |                 ^                      ^
+                    return Ok(self.dummy_const_arg_needs_braces(err, start.to(expr.span)));
+                }
+            }
+            Err(err) => {
+                err.cancel();
+            }
+        }
+        self.restore_snapshot(snapshot);
+        Err(err)
+    }
+
+    /// Try to recover from an unbraced const argument whose first token [could begin a type][ty].
+    ///
+    /// [ty]: token::Token::can_begin_type
+    pub(crate) fn recover_unbraced_const_arg_that_can_begin_ty(
+        &mut self,
+        mut snapshot: SnapshotParser<'a>,
+    ) -> Option<P<ast::Expr>> {
+        match (|| {
+            let attrs = self.parse_outer_attributes()?;
+            snapshot.parse_expr_res(Restrictions::CONST_EXPR, attrs)
+        })() {
+            // Since we don't know the exact reason why we failed to parse the type or the
+            // expression, employ a simple heuristic to weed out some pathological cases.
+            Ok((expr, _)) if let token::Comma | token::Gt = snapshot.token.kind => {
+                self.restore_snapshot(snapshot);
+                Some(expr)
+            }
+            Ok(_) => None,
+            Err(err) => {
+                err.cancel();
+                None
+            }
+        }
+    }
+
+    /// Creates a dummy const argument, and reports that the expression must be enclosed in braces
+    pub(super) fn dummy_const_arg_needs_braces(&self, mut err: Diag<'a>, span: Span) -> GenericArg {
+        err.multipart_suggestion(
+            "expressions must be enclosed in braces to be used as const generic \
+             arguments",
+            vec![(span.shrink_to_lo(), "{ ".to_string()), (span.shrink_to_hi(), " }".to_string())],
+            Applicability::MaybeIncorrect,
+        );
+        let guar = err.emit();
+        let value = self.mk_expr_err(span, guar);
+        GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value })
+    }
+
+    /// Some special error handling for the "top-level" patterns in a match arm,
+    /// `for` loop, `let`, &c. (in contrast to subpatterns within such).
+    pub(crate) fn maybe_recover_colon_colon_in_pat_typo(
+        &mut self,
+        mut first_pat: P<Pat>,
+        expected: Option<Expected>,
+    ) -> P<Pat> {
+        if token::Colon != self.token.kind {
+            return first_pat;
+        }
+        if !matches!(first_pat.kind, PatKind::Ident(_, _, None) | PatKind::Path(..))
+            || !self.look_ahead(1, |token| token.is_non_reserved_ident())
+        {
+            let mut snapshot_type = self.create_snapshot_for_diagnostic();
+            snapshot_type.bump(); // `:`
+            match snapshot_type.parse_ty() {
+                Err(inner_err) => {
+                    inner_err.cancel();
+                }
+                Ok(ty) => {
+                    let Err(mut err) = self.expected_one_of_not_found(&[], &[]) else {
+                        return first_pat;
+                    };
+                    err.span_label(ty.span, "specifying the type of a pattern isn't supported");
+                    self.restore_snapshot(snapshot_type);
+                    let span = first_pat.span.to(ty.span);
+                    first_pat = self.mk_pat(span, PatKind::Wild);
+                    err.emit();
+                }
+            }
+            return first_pat;
+        }
+        // The pattern looks like it might be a path with a `::` -> `:` typo:
+        // `match foo { bar:baz => {} }`
+        let colon_span = self.token.span;
+        // We only emit "unexpected `:`" error here if we can successfully parse the
+        // whole pattern correctly in that case.
+        let mut snapshot_pat = self.create_snapshot_for_diagnostic();
+        let mut snapshot_type = self.create_snapshot_for_diagnostic();
+
+        // Create error for "unexpected `:`".
+        match self.expected_one_of_not_found(&[], &[]) {
+            Err(mut err) => {
+                // Skip the `:`.
+                snapshot_pat.bump();
+                snapshot_type.bump();
+                match snapshot_pat.parse_pat_no_top_alt(expected, None) {
+                    Err(inner_err) => {
+                        inner_err.cancel();
+                    }
+                    Ok(mut pat) => {
+                        // We've parsed the rest of the pattern.
+                        let new_span = first_pat.span.to(pat.span);
+                        let mut show_sugg = false;
+                        // Try to construct a recovered pattern.
+                        match &mut pat.kind {
+                            PatKind::Struct(qself @ None, path, ..)
+                            | PatKind::TupleStruct(qself @ None, path, _)
+                            | PatKind::Path(qself @ None, path) => match &first_pat.kind {
+                                PatKind::Ident(_, ident, _) => {
+                                    path.segments.insert(0, PathSegment::from_ident(*ident));
+                                    path.span = new_span;
+                                    show_sugg = true;
+                                    first_pat = pat;
+                                }
+                                PatKind::Path(old_qself, old_path) => {
+                                    path.segments = old_path
+                                        .segments
+                                        .iter()
+                                        .cloned()
+                                        .chain(take(&mut path.segments))
+                                        .collect();
+                                    path.span = new_span;
+                                    *qself = old_qself.clone();
+                                    first_pat = pat;
+                                    show_sugg = true;
+                                }
+                                _ => {}
+                            },
+                            PatKind::Ident(BindingMode::NONE, ident, None) => {
+                                match &first_pat.kind {
+                                    PatKind::Ident(_, old_ident, _) => {
+                                        let path = PatKind::Path(
+                                            None,
+                                            Path {
+                                                span: new_span,
+                                                segments: thin_vec![
+                                                    PathSegment::from_ident(*old_ident),
+                                                    PathSegment::from_ident(*ident),
+                                                ],
+                                                tokens: None,
+                                            },
+                                        );
+                                        first_pat = self.mk_pat(new_span, path);
+                                        show_sugg = true;
+                                    }
+                                    PatKind::Path(old_qself, old_path) => {
+                                        let mut segments = old_path.segments.clone();
+                                        segments.push(PathSegment::from_ident(*ident));
+                                        let path = PatKind::Path(
+                                            old_qself.clone(),
+                                            Path { span: new_span, segments, tokens: None },
+                                        );
+                                        first_pat = self.mk_pat(new_span, path);
+                                        show_sugg = true;
+                                    }
+                                    _ => {}
+                                }
+                            }
+                            _ => {}
+                        }
+                        if show_sugg {
+                            err.span_suggestion_verbose(
+                                colon_span.until(self.look_ahead(1, |t| t.span)),
+                                "maybe write a path separator here",
+                                "::",
+                                Applicability::MaybeIncorrect,
+                            );
+                        } else {
+                            first_pat = self.mk_pat(new_span, PatKind::Wild);
+                        }
+                        self.restore_snapshot(snapshot_pat);
+                    }
+                }
+                match snapshot_type.parse_ty() {
+                    Err(inner_err) => {
+                        inner_err.cancel();
+                    }
+                    Ok(ty) => {
+                        err.span_label(ty.span, "specifying the type of a pattern isn't supported");
+                        self.restore_snapshot(snapshot_type);
+                        let new_span = first_pat.span.to(ty.span);
+                        first_pat = self.mk_pat(new_span, PatKind::Wild);
+                    }
+                }
+                err.emit();
+            }
+            _ => {
+                // Carry on as if we had not done anything. This should be unreachable.
+            }
+        };
+        first_pat
+    }
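+
+    // Sketch of the `::` -> `:` typo recovery above (hypothetical user code):
+    //
+    //     match color { Color:Red => {} }      // written
+    //     match color { Color::Red => {} }     // suggested path-separator fix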
+
+    /// If `loop_header` is `Some` and an unexpected block label is encountered,
+    /// suggest moving the label just before `loop_header`; otherwise suggest removing it.
+    pub(crate) fn maybe_recover_unexpected_block_label(
+        &mut self,
+        loop_header: Option<Span>,
+    ) -> bool {
+        // Check for `'a : {`
+        if !(self.check_lifetime()
+            && self.look_ahead(1, |t| *t == token::Colon)
+            && self.look_ahead(2, |t| *t == token::OpenBrace))
+        {
+            return false;
+        }
+        let label = self.eat_label().expect("just checked if a label exists");
+        self.bump(); // eat `:`
+        let span = label.ident.span.to(self.prev_token.span);
+        let mut diag = self
+            .dcx()
+            .struct_span_err(span, "block label not supported here")
+            .with_span_label(span, "not supported here");
+        if let Some(loop_header) = loop_header {
+            diag.multipart_suggestion(
+                "if you meant to label the loop, move this label before the loop",
+                vec![
+                    (label.ident.span.until(self.token.span), String::from("")),
+                    (loop_header.shrink_to_lo(), format!("{}: ", label.ident)),
+                ],
+                Applicability::MachineApplicable,
+            );
+        } else {
+            diag.tool_only_span_suggestion(
+                label.ident.span.until(self.token.span),
+                "remove this block label",
+                "",
+                Applicability::MachineApplicable,
+            );
+        }
+        diag.emit();
+        true
+    }
+
+    /// Some special error handling for the "top-level" patterns in a match arm,
+    /// `for` loop, `let`, &c. (in contrast to subpatterns within such).
+    pub(crate) fn maybe_recover_unexpected_comma(
+        &mut self,
+        lo: Span,
+        rt: CommaRecoveryMode,
+    ) -> PResult<'a, ()> {
+        if self.token != token::Comma {
+            return Ok(());
+        }
+
+        // An unexpected comma after a top-level pattern is a clue that the
+        // user (perhaps more accustomed to some other language) forgot the
+        // parentheses in what should have been a tuple pattern; return a
+        // suggestion-enhanced error here rather than choking on the comma later.
+        let comma_span = self.token.span;
+        self.bump();
+        if let Err(err) = self.skip_pat_list() {
+            // We didn't expect this to work anyway; we just wanted to advance to the
+            // end of the comma-sequence so we know the span to suggest parenthesizing.
+            err.cancel();
+        }
+        let seq_span = lo.to(self.prev_token.span);
+        let mut err = self.dcx().struct_span_err(comma_span, "unexpected `,` in pattern");
+        if let Ok(seq_snippet) = self.span_to_snippet(seq_span) {
+            err.multipart_suggestion(
+                format!(
+                    "try adding parentheses to match on a tuple{}",
+                    if let CommaRecoveryMode::LikelyTuple = rt { "" } else { "..." },
+                ),
+                vec![
+                    (seq_span.shrink_to_lo(), "(".to_string()),
+                    (seq_span.shrink_to_hi(), ")".to_string()),
+                ],
+                Applicability::MachineApplicable,
+            );
+            if let CommaRecoveryMode::EitherTupleOrPipe = rt {
+                err.span_suggestion(
+                    seq_span,
+                    "...or a vertical bar to match on multiple alternatives",
+                    seq_snippet.replace(',', " |"),
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
+        Err(err)
+    }
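+
+    // Sketch of the suggestions produced above for a stray top-level comma
+    // (hypothetical user code):
+    //
+    //     match p { 0, 1 => {} }        // written
+    //     match p { (0, 1) => {} }      // "try adding parentheses to match on a tuple"
+    //     match p { 0 | 1 => {} }       // "...or a vertical bar to match on multiple alternatives"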
+
+    pub(crate) fn maybe_recover_bounds_doubled_colon(&mut self, ty: &Ty) -> PResult<'a, ()> {
+        let TyKind::Path(qself, path) = &ty.kind else { return Ok(()) };
+        let qself_position = qself.as_ref().map(|qself| qself.position);
+        for (i, segments) in path.segments.windows(2).enumerate() {
+            if qself_position.is_some_and(|pos| i < pos) {
+                continue;
+            }
+            if let [a, b] = segments {
+                let (a_span, b_span) = (a.span(), b.span());
+                let between_span = a_span.shrink_to_hi().to(b_span.shrink_to_lo());
+                if self.span_to_snippet(between_span).as_deref() == Ok(":: ") {
+                    return Err(self.dcx().create_err(DoubleColonInBound {
+                        span: path.span.shrink_to_hi(),
+                        between: between_span,
+                    }));
+                }
+            }
+        }
+        Ok(())
+    }
+
+    /// Check for exclusive ranges written as `..<`
+    pub(crate) fn maybe_err_dotdotlt_syntax(&self, maybe_lt: Token, mut err: Diag<'a>) -> Diag<'a> {
+        if maybe_lt == token::Lt
+            && (self.expected_token_types.contains(TokenType::Gt)
+                || matches!(self.token.kind, token::Literal(..)))
+        {
+            err.span_suggestion(
+                maybe_lt.span,
+                "remove the `<` to write an exclusive range",
+                "",
+                Applicability::MachineApplicable,
+            );
+        }
+        err
+    }
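+
+    // Sketch of the `..<` recovery above (hypothetical user code):
+    //
+    //     for i in 0..<10 {}     // written
+    //     for i in 0..10 {}      // suggested: remove the `<` for an exclusive range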
+
+    /// Checks whether the next tokens form one of the following VCS conflict markers,
+    /// as selected by the parameters passed:
+    ///
+    /// * `<<<<<<<`
+    /// * `|||||||`
+    /// * `=======`
+    /// * `>>>>>>>`
+    pub(super) fn is_vcs_conflict_marker(
+        &mut self,
+        long_kind: &TokenKind,
+        short_kind: &TokenKind,
+    ) -> bool {
+        (0..3).all(|i| self.look_ahead(i, |tok| tok == long_kind))
+            && self.look_ahead(3, |tok| tok == short_kind)
+    }
+
+    fn conflict_marker(&mut self, long_kind: &TokenKind, short_kind: &TokenKind) -> Option<Span> {
+        if self.is_vcs_conflict_marker(long_kind, short_kind) {
+            let lo = self.token.span;
+            for _ in 0..4 {
+                self.bump();
+            }
+            return Some(lo.to(self.prev_token.span));
+        }
+        None
+    }
+
+    pub(super) fn recover_vcs_conflict_marker(&mut self) {
+        // <<<<<<<
+        let Some(start) = self.conflict_marker(&TokenKind::Shl, &TokenKind::Lt) else {
+            return;
+        };
+        let mut spans = Vec::with_capacity(3);
+        spans.push(start);
+        // |||||||
+        let mut middlediff3 = None;
+        // =======
+        let mut middle = None;
+        // >>>>>>>
+        let mut end = None;
+        loop {
+            if self.token == TokenKind::Eof {
+                break;
+            }
+            if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::Or) {
+                middlediff3 = Some(span);
+            }
+            if let Some(span) = self.conflict_marker(&TokenKind::EqEq, &TokenKind::Eq) {
+                middle = Some(span);
+            }
+            if let Some(span) = self.conflict_marker(&TokenKind::Shr, &TokenKind::Gt) {
+                spans.push(span);
+                end = Some(span);
+                break;
+            }
+            self.bump();
+        }
+
+        let mut err = self.dcx().struct_span_fatal(spans, "encountered diff marker");
+        match middlediff3 {
+            // We're using diff3
+            Some(middlediff3) => {
+                err.span_label(
+                    start,
+                    "between this marker and `|||||||` is the code that we're merging into",
+                );
+                err.span_label(middlediff3, "between this marker and `=======` is the base code (what the two refs diverged from)");
+            }
+            None => {
+                err.span_label(
+                    start,
+                    "between this marker and `=======` is the code that we're merging into",
+                );
+            }
+        };
+
+        if let Some(middle) = middle {
+            err.span_label(middle, "between this marker and `>>>>>>>` is the incoming code");
+        }
+        if let Some(end) = end {
+            err.span_label(end, "this marker concludes the conflict region");
+        }
+        err.note(
+            "conflict markers indicate that a merge was started but could not be completed due \
+             to merge conflicts\n\
+             to resolve a conflict, keep only the code you want and then delete the lines \
+             containing conflict markers",
+        );
+        err.help(
+            "if you're having merge conflicts after pulling new code:\n\
+             the top section is the code you already had and the bottom section is the remote code\n\
+             if you're in the middle of a rebase:\n\
+             the top section is the code being rebased onto and the bottom section is the code \
+             coming from the current commit being rebased",
+        );
+
+        err.note(
+            "for an explanation on these markers from the `git` documentation:\n\
+             visit <https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>",
+        );
+
+        err.emit();
+    }
+
+    /// Parses and throws away a parenthesized, comma-separated
+    /// sequence of patterns until `)` is reached.
+    fn skip_pat_list(&mut self) -> PResult<'a, ()> {
+        while !self.check(exp!(CloseParen)) {
+            self.parse_pat_no_top_alt(None, None)?;
+            if !self.eat(exp!(Comma)) {
+                return Ok(());
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
new file mode 100644
index 00000000000..35b987cf50f
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -0,0 +1,4234 @@
+// ignore-tidy-filelength
+
+use core::mem;
+use core::ops::{Bound, ControlFlow};
+
+use ast::mut_visit::{self, MutVisitor};
+use ast::token::IdentIsRaw;
+use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered};
+use rustc_ast::ptr::P;
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, Token, TokenKind};
+use rustc_ast::tokenstream::TokenTree;
+use rustc_ast::util::case::Case;
+use rustc_ast::util::classify;
+use rustc_ast::util::parser::{AssocOp, ExprPrecedence, Fixity, prec_let_scrutinee_needs_par};
+use rustc_ast::visit::{Visitor, walk_expr};
+use rustc_ast::{
+    self as ast, AnonConst, Arm, AssignOp, AssignOpKind, AttrStyle, AttrVec, BinOp, BinOpKind,
+    BlockCheckMode, CaptureBy, ClosureBinder, DUMMY_NODE_ID, Expr, ExprField, ExprKind, FnDecl,
+    FnRetTy, Label, MacCall, MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind,
+    UnOp, UnsafeBinderCastKind, YieldKind,
+};
+use rustc_data_structures::stack::ensure_sufficient_stack;
+use rustc_errors::{Applicability, Diag, PResult, StashKey, Subdiagnostic};
+use rustc_literal_escaper::unescape_char;
+use rustc_macros::Subdiagnostic;
+use rustc_session::errors::{ExprParenthesesNeeded, report_lit_error};
+use rustc_session::lint::BuiltinLintDiag;
+use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
+use rustc_span::edition::Edition;
+use rustc_span::source_map::{self, Spanned};
+use rustc_span::{BytePos, ErrorGuaranteed, Ident, Pos, Span, Symbol, kw, sym};
+use thin_vec::{ThinVec, thin_vec};
+use tracing::instrument;
+
+use super::diagnostics::SnapshotParser;
+use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
+use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
+use super::{
+    AttrWrapper, BlockMode, ClosureSpans, ExpTokenPair, ForceCollect, Parser, PathStyle,
+    Restrictions, SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
+};
+use crate::{errors, exp, maybe_recover_from_interpolated_ty_qpath};
+
+#[derive(Debug)]
+pub(super) enum DestructuredFloat {
+    /// 1e2
+    Single(Symbol, Span),
+    /// 1.
+    TrailingDot(Symbol, Span, Span),
+    /// 1.2 | 1.2e3
+    MiddleDot(Symbol, Span, Span, Symbol, Span),
+    /// Invalid
+    Error,
+}
+
+impl<'a> Parser<'a> {
+    /// Parses an expression.
+    #[inline]
+    pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
+        self.current_closure.take();
+
+        let attrs = self.parse_outer_attributes()?;
+        self.parse_expr_res(Restrictions::empty(), attrs).map(|res| res.0)
+    }
+
+    /// Parses an expression, forcing tokens to be collected.
+    pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
+        self.current_closure.take();
+
+        // If the expression is associative (e.g. `1 + 2`), then any preceding
+        // outer attribute actually belongs to the first inner sub-expression,
+        // in which case we must use the pre-attr pos to include the attribute
+        // in the collected tokens for the outer expression.
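+        // For example, in `#[attr] 1 + 2` the attribute ends up on the inner `1`,
+        // but the tokens collected for the whole `1 + 2` must still start at `#[attr]`.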
+        let pre_attr_pos = self.collect_pos();
+        let attrs = self.parse_outer_attributes()?;
+        self.collect_tokens(
+            Some(pre_attr_pos),
+            AttrWrapper::empty(),
+            ForceCollect::Yes,
+            |this, _empty_attrs| {
+                let (expr, is_assoc) = this.parse_expr_res(Restrictions::empty(), attrs)?;
+                let use_pre_attr_pos =
+                    if is_assoc { UsePreAttrPos::Yes } else { UsePreAttrPos::No };
+                Ok((expr, Trailing::No, use_pre_attr_pos))
+            },
+        )
+    }
+
+    pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
+        self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value })
+    }
+
+    fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
+        let attrs = self.parse_outer_attributes()?;
+        match self.parse_expr_res(restrictions, attrs) {
+            Ok((expr, _)) => Ok(expr),
+            Err(err) => match self.token.ident() {
+                Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
+                    if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
+                {
+                    // Special-case handling of `foo(_, _, _)`
+                    let guar = err.emit();
+                    self.bump();
+                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err(guar)))
+                }
+                _ => Err(err),
+            },
+        }
+    }
+
+    /// Parses a sequence of expressions delimited by parentheses.
+    fn parse_expr_paren_seq(&mut self) -> PResult<'a, ThinVec<P<Expr>>> {
+        self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore(Restrictions::empty()))
+            .map(|(r, _)| r)
+    }
+
+    /// Parses an expression, subject to the given restrictions.
+    #[inline]
+    pub(super) fn parse_expr_res(
+        &mut self,
+        r: Restrictions,
+        attrs: AttrWrapper,
+    ) -> PResult<'a, (P<Expr>, bool)> {
+        self.with_res(r, |this| this.parse_expr_assoc_with(Bound::Unbounded, attrs))
+    }
+
+    /// Parses an associative expression with operators of at least `min_prec` precedence.
+    /// The `bool` in the return value indicates if it was an assoc expr, i.e. with an operator
+    /// followed by a subexpression (e.g. `1 + 2`).
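+    /// For example, parsing `1 + 2 * 3` with an unbounded `min_prec` yields
+    /// `1 + (2 * 3)`, since `*` has a higher precedence than `+`.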
+    pub(super) fn parse_expr_assoc_with(
+        &mut self,
+        min_prec: Bound<ExprPrecedence>,
+        attrs: AttrWrapper,
+    ) -> PResult<'a, (P<Expr>, bool)> {
+        let lhs = if self.token.is_range_separator() {
+            return self.parse_expr_prefix_range(attrs).map(|res| (res, false));
+        } else {
+            self.parse_expr_prefix(attrs)?
+        };
+        self.parse_expr_assoc_rest_with(min_prec, false, lhs)
+    }
+
+    /// Parses the rest of an associative expression (i.e. the part after the lhs) with operators
+    /// of at least `min_prec` precedence. The `bool` in the return value indicates if something
+    /// was actually parsed.
+    pub(super) fn parse_expr_assoc_rest_with(
+        &mut self,
+        min_prec: Bound<ExprPrecedence>,
+        starts_stmt: bool,
+        mut lhs: P<Expr>,
+    ) -> PResult<'a, (P<Expr>, bool)> {
+        let mut parsed_something = false;
+        if !self.should_continue_as_assoc_expr(&lhs) {
+            return Ok((lhs, parsed_something));
+        }
+
+        self.expected_token_types.insert(TokenType::Operator);
+        while let Some(op) = self.check_assoc_op() {
+            let lhs_span = self.interpolated_or_expr_span(&lhs);
+            let cur_op_span = self.token.span;
+            let restrictions = if op.node.is_assign_like() {
+                self.restrictions & Restrictions::NO_STRUCT_LITERAL
+            } else {
+                self.restrictions
+            };
+            let prec = op.node.precedence();
+            if match min_prec {
+                Bound::Included(min_prec) => prec < min_prec,
+                Bound::Excluded(min_prec) => prec <= min_prec,
+                Bound::Unbounded => false,
+            } {
+                break;
+            }
+            // Check for deprecated `...` syntax
+            if self.token == token::DotDotDot && op.node == AssocOp::Range(RangeLimits::Closed) {
+                self.err_dotdotdot_syntax(self.token.span);
+            }
+
+            if self.token == token::LArrow {
+                self.err_larrow_operator(self.token.span);
+            }
+
+            parsed_something = true;
+            self.bump();
+            if op.node.is_comparison() {
+                if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
+                    return Ok((expr, parsed_something));
+                }
+            }
+
+            // Look for JS' `===` and `!==` and recover
+            if let AssocOp::Binary(bop @ BinOpKind::Eq | bop @ BinOpKind::Ne) = op.node
+                && self.token == token::Eq
+                && self.prev_token.span.hi() == self.token.span.lo()
+            {
+                let sp = op.span.to(self.token.span);
+                let sugg = bop.as_str().into();
+                let invalid = format!("{sugg}=");
+                self.dcx().emit_err(errors::InvalidComparisonOperator {
+                    span: sp,
+                    invalid: invalid.clone(),
+                    sub: errors::InvalidComparisonOperatorSub::Correctable {
+                        span: sp,
+                        invalid,
+                        correct: sugg,
+                    },
+                });
+                self.bump();
+            }
+
+            // Look for PHP's `<>` and recover
+            if op.node == AssocOp::Binary(BinOpKind::Lt)
+                && self.token == token::Gt
+                && self.prev_token.span.hi() == self.token.span.lo()
+            {
+                let sp = op.span.to(self.token.span);
+                self.dcx().emit_err(errors::InvalidComparisonOperator {
+                    span: sp,
+                    invalid: "<>".into(),
+                    sub: errors::InvalidComparisonOperatorSub::Correctable {
+                        span: sp,
+                        invalid: "<>".into(),
+                        correct: "!=".into(),
+                    },
+                });
+                self.bump();
+            }
+
+            // Look for C++'s `<=>` and recover
+            if op.node == AssocOp::Binary(BinOpKind::Le)
+                && self.token == token::Gt
+                && self.prev_token.span.hi() == self.token.span.lo()
+            {
+                let sp = op.span.to(self.token.span);
+                self.dcx().emit_err(errors::InvalidComparisonOperator {
+                    span: sp,
+                    invalid: "<=>".into(),
+                    sub: errors::InvalidComparisonOperatorSub::Spaceship(sp),
+                });
+                self.bump();
+            }
+
+            if self.prev_token == token::Plus
+                && self.token == token::Plus
+                && self.prev_token.span.between(self.token.span).is_empty()
+            {
+                let op_span = self.prev_token.span.to(self.token.span);
+                // Eat the second `+`
+                self.bump();
+                lhs = self.recover_from_postfix_increment(lhs, op_span, starts_stmt)?;
+                continue;
+            }
+
+            if self.prev_token == token::Minus
+                && self.token == token::Minus
+                && self.prev_token.span.between(self.token.span).is_empty()
+                && !self.look_ahead(1, |tok| tok.can_begin_expr())
+            {
+                let op_span = self.prev_token.span.to(self.token.span);
+                // Eat the second `-`
+                self.bump();
+                lhs = self.recover_from_postfix_decrement(lhs, op_span, starts_stmt)?;
+                continue;
+            }
+
+            let op_span = op.span;
+            let op = op.node;
+            // Special cases:
+            if op == AssocOp::Cast {
+                lhs = self.parse_assoc_op_cast(lhs, lhs_span, op_span, ExprKind::Cast)?;
+                continue;
+            } else if let AssocOp::Range(limits) = op {
+                // If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
+                // generalise it to the Fixity::None code.
+                lhs = self.parse_expr_range(prec, lhs, limits, cur_op_span)?;
+                break;
+            }
+
+            let min_prec = match op.fixity() {
+                Fixity::Right => Bound::Included(prec),
+                Fixity::Left | Fixity::None => Bound::Excluded(prec),
+            };
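+            // E.g. left-associative `a - b - c` groups as `(a - b) - c` because
+            // `Fixity::Left` excludes equal precedence from the right-hand side,
+            // while right-associative `a = b = c` groups as `a = (b = c)`.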
+            let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
+                let attrs = this.parse_outer_attributes()?;
+                this.parse_expr_assoc_with(min_prec, attrs)
+            })?;
+
+            let span = self.mk_expr_sp(&lhs, lhs_span, op_span, rhs.span);
+            lhs = match op {
+                AssocOp::Binary(ast_op) => {
+                    let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
+                    self.mk_expr(span, binary)
+                }
+                AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span)),
+                AssocOp::AssignOp(aop) => {
+                    let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
+                    self.mk_expr(span, aopexpr)
+                }
+                AssocOp::Cast | AssocOp::Range(_) => {
+                    self.dcx().span_bug(span, "AssocOp should have been handled by special case")
+                }
+            };
+        }
+
+        Ok((lhs, parsed_something))
+    }
+
+    fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
+        match (self.expr_is_complete(lhs), AssocOp::from_token(&self.token)) {
+            // Semi-statement forms are odd:
+            // See https://github.com/rust-lang/rust/issues/29071
+            (true, None) => false,
+            (false, _) => true, // Continue parsing the expression.
+            // An exhaustive check is done in the following block, but these are checked first
+            // because they *are* ambiguous but also reasonable looking incorrect syntax, so we
+            // want to keep their span info to improve diagnostics in these cases in a later stage.
+            (true, Some(AssocOp::Binary(
+                BinOpKind::Mul | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
+                BinOpKind::Sub | // `{ 42 } -5`
+                BinOpKind::Add | // `{ 42 } + 42` (unary plus)
+                BinOpKind::And | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`
+                BinOpKind::Or | // `{ 42 } || 42` ("logical or" or closure)
+                BinOpKind::BitOr // `{ 42 } | 42` or `{ 42 } |x| 42`
+            ))) => {
+                // These cases are ambiguous and can't be identified in the parser alone.
+                //
+                // Bitwise AND is left out because guessing intent is hard. We can make
+                // suggestions based on the assumption that double-refs are rarely intentional,
+                // and closures are distinct enough that they don't get mixed up with their
+                // return value.
+                let sp = self.psess.source_map().start_point(self.token.span);
+                self.psess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
+                false
+            }
+            (true, Some(op)) if !op.can_continue_expr_unambiguously() => false,
+            (true, Some(_)) => {
+                self.error_found_expr_would_be_stmt(lhs);
+                true
+            }
+        }
+    }
+
+    /// We've found an expression that would be parsed as a statement,
+    /// but the next token implies this should be parsed as an expression.
+    /// For example: `if let Some(x) = x { x } else { 0 } / 2`.
+    fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
+        self.dcx().emit_err(errors::FoundExprWouldBeStmt {
+            span: self.token.span,
+            token: self.token,
+            suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
+        });
+    }
+
+    /// Possibly translate the current token to an associative operator.
+    /// The method does not advance the current token.
+    ///
+    /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively.
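+    /// For example, `x and y` is recovered as if it were written `x && y`, after
+    /// emitting an error.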
+    pub(super) fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
+        let (op, span) = match (AssocOp::from_token(&self.token), self.token.ident()) {
+            // When parsing const expressions, stop parsing when encountering `>`.
+            (
+                Some(
+                    AssocOp::Binary(BinOpKind::Shr | BinOpKind::Gt | BinOpKind::Ge)
+                    | AssocOp::AssignOp(AssignOpKind::ShrAssign),
+                ),
+                _,
+            ) if self.restrictions.contains(Restrictions::CONST_EXPR) => {
+                return None;
+            }
+            // When recovering patterns as expressions, stop parsing when encountering an
+            // assignment `=`, an alternative `|`, or a range `..`.
+            (
+                Some(
+                    AssocOp::Assign
+                    | AssocOp::AssignOp(_)
+                    | AssocOp::Binary(BinOpKind::BitOr)
+                    | AssocOp::Range(_),
+                ),
+                _,
+            ) if self.restrictions.contains(Restrictions::IS_PAT) => {
+                return None;
+            }
+            (Some(op), _) => (op, self.token.span),
+            (None, Some((Ident { name: sym::and, span }, IdentIsRaw::No)))
+                if self.may_recover() =>
+            {
+                self.dcx().emit_err(errors::InvalidLogicalOperator {
+                    span: self.token.span,
+                    incorrect: "and".into(),
+                    sub: errors::InvalidLogicalOperatorSub::Conjunction(self.token.span),
+                });
+                (AssocOp::Binary(BinOpKind::And), span)
+            }
+            (None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => {
+                self.dcx().emit_err(errors::InvalidLogicalOperator {
+                    span: self.token.span,
+                    incorrect: "or".into(),
+                    sub: errors::InvalidLogicalOperatorSub::Disjunction(self.token.span),
+                });
+                (AssocOp::Binary(BinOpKind::Or), span)
+            }
+            _ => return None,
+        };
+        Some(source_map::respan(span, op))
+    }
+
+    /// Checks if this expression is a successfully parsed statement.
+    fn expr_is_complete(&self, e: &Expr) -> bool {
+        self.restrictions.contains(Restrictions::STMT_EXPR) && classify::expr_is_complete(e)
+    }
+
+    /// Parses `x..y`, `x..=y`, and `x..`/`x..=`.
+    /// The other two variants are handled in `parse_expr_prefix_range` below.
+    fn parse_expr_range(
+        &mut self,
+        prec: ExprPrecedence,
+        lhs: P<Expr>,
+        limits: RangeLimits,
+        cur_op_span: Span,
+    ) -> PResult<'a, P<Expr>> {
+        let rhs = if self.is_at_start_of_range_notation_rhs() {
+            let maybe_lt = self.token;
+            let attrs = self.parse_outer_attributes()?;
+            Some(
+                self.parse_expr_assoc_with(Bound::Excluded(prec), attrs)
+                    .map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?
+                    .0,
+            )
+        } else {
+            None
+        };
+        let rhs_span = rhs.as_ref().map_or(cur_op_span, |x| x.span);
+        let span = self.mk_expr_sp(&lhs, lhs.span, cur_op_span, rhs_span);
+        let range = self.mk_range(Some(lhs), rhs, limits);
+        Ok(self.mk_expr(span, range))
+    }
+
+    fn is_at_start_of_range_notation_rhs(&self) -> bool {
+        if self.token.can_begin_expr() {
+            // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
+            if self.token == token::OpenBrace {
+                return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
+            }
+            true
+        } else {
+            false
+        }
+    }
+
+    /// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
+    fn parse_expr_prefix_range(&mut self, attrs: AttrWrapper) -> PResult<'a, P<Expr>> {
+        if !attrs.is_empty() {
+            let err = errors::DotDotRangeAttribute { span: self.token.span };
+            self.dcx().emit_err(err);
+        }
+
+        // Check for deprecated `...` syntax.
+        if self.token == token::DotDotDot {
+            self.err_dotdotdot_syntax(self.token.span);
+        }
+
+        debug_assert!(
+            self.token.is_range_separator(),
+            "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
+            self.token
+        );
+
+        let limits = match self.token.kind {
+            token::DotDot => RangeLimits::HalfOpen,
+            _ => RangeLimits::Closed,
+        };
+        let op = AssocOp::from_token(&self.token);
+        let attrs = self.parse_outer_attributes()?;
+        self.collect_tokens_for_expr(attrs, |this, attrs| {
+            let lo = this.token.span;
+            let maybe_lt = this.look_ahead(1, |t| t.clone());
+            this.bump();
+            let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
+                // RHS must be parsed with more associativity than the dots.
+                let attrs = this.parse_outer_attributes()?;
+                this.parse_expr_assoc_with(Bound::Excluded(op.unwrap().precedence()), attrs)
+                    .map(|(x, _)| (lo.to(x.span), Some(x)))
+                    .map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
+            } else {
+                (lo, None)
+            };
+            let range = this.mk_range(None, opt_end, limits);
+            Ok(this.mk_expr_with_attrs(span, range, attrs))
+        })
+    }
+
+    /// Parses a prefix-unary-operator expr.
+    fn parse_expr_prefix(&mut self, attrs: AttrWrapper) -> PResult<'a, P<Expr>> {
+        let lo = self.token.span;
+
+        macro_rules! make_it {
+            ($this:ident, $attrs:expr, |this, _| $body:expr) => {
+                $this.collect_tokens_for_expr($attrs, |$this, attrs| {
+                    let (hi, ex) = $body?;
+                    Ok($this.mk_expr_with_attrs(lo.to(hi), ex, attrs))
+                })
+            };
+        }
+
+        let this = self;
+
+        // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
+        match this.token.uninterpolate().kind {
+            // `!expr`
+            token::Bang => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
+            // `~expr`
+            token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
+            // `-expr`
+            token::Minus => {
+                make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg))
+            }
+            // `*expr`
+            token::Star => {
+                make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref))
+            }
+            // `&expr` and `&&expr`
+            token::And | token::AndAnd => {
+                make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo))
+            }
+            // `+lit`
+            token::Plus if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
+                let mut err = errors::LeadingPlusNotSupported {
+                    span: lo,
+                    remove_plus: None,
+                    add_parentheses: None,
+                };
+
+                // a block on the LHS might have been intended to be an expression instead
+                if let Some(sp) = this.psess.ambiguous_block_expr_parse.borrow().get(&lo) {
+                    err.add_parentheses = Some(ExprParenthesesNeeded::surrounding(*sp));
+                } else {
+                    err.remove_plus = Some(lo);
+                }
+                this.dcx().emit_err(err);
+
+                this.bump();
+                let attrs = this.parse_outer_attributes()?;
+                this.parse_expr_prefix(attrs)
+            }
+            // Recover from `++x`:
+            token::Plus if this.look_ahead(1, |t| *t == token::Plus) => {
+                let starts_stmt =
+                    this.prev_token == token::Semi || this.prev_token == token::CloseBrace;
+                let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span));
+                // Eat both `+`s.
+                this.bump();
+                this.bump();
+
+                let operand_expr = this.parse_expr_dot_or_call(attrs)?;
+                this.recover_from_prefix_increment(operand_expr, pre_span, starts_stmt)
+            }
+            token::Ident(..) if this.token.is_keyword(kw::Box) => {
+                make_it!(this, attrs, |this, _| this.parse_expr_box(lo))
+            }
+            token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => {
+                make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
+            }
+            _ => return this.parse_expr_dot_or_call(attrs),
+        }
+    }
+
+    fn parse_expr_prefix_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
+        self.bump();
+        let attrs = self.parse_outer_attributes()?;
+        let expr = if self.token.is_range_separator() {
+            self.parse_expr_prefix_range(attrs)
+        } else {
+            self.parse_expr_prefix(attrs)
+        }?;
+        let span = self.interpolated_or_expr_span(&expr);
+        Ok((lo.to(span), expr))
+    }
+
+    fn parse_expr_unary(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> {
+        let (span, expr) = self.parse_expr_prefix_common(lo)?;
+        Ok((span, self.mk_unary(op, expr)))
+    }
+
+    /// Recover on `~expr` in favor of `!expr`.
+    fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
+        self.dcx().emit_err(errors::TildeAsUnaryOperator(lo));
+
+        self.parse_expr_unary(lo, UnOp::Not)
+    }
+
+    /// Parse `box expr`. This syntax has been removed, but we still parse it
+    /// for now to provide a more useful error.
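+    /// For example, `box x` is rejected with a suggestion to use `Box::new(x)`
+    /// instead (via `errors::BoxSyntaxRemoved` and its `errors::AddBoxNew` suggestion).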
+    fn parse_expr_box(&mut self, box_kw: Span) -> PResult<'a, (Span, ExprKind)> {
+        let (span, expr) = self.parse_expr_prefix_common(box_kw)?;
+        // Make a multipart suggestion instead of `span_to_snippet` in case source isn't available
+        let box_kw_and_lo = box_kw.until(self.interpolated_or_expr_span(&expr));
+        let hi = span.shrink_to_hi();
+        let sugg = errors::AddBoxNew { box_kw_and_lo, hi };
+        let guar = self.dcx().emit_err(errors::BoxSyntaxRemoved { span, sugg });
+        Ok((span, ExprKind::Err(guar)))
+    }
+
+    fn is_mistaken_not_ident_negation(&self) -> bool {
+        let token_cannot_continue_expr = |t: &Token| match t.uninterpolate().kind {
+            // These tokens can start an expression after `!`, but
+            // can't continue an expression after an ident
+            token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
+            token::Literal(..) | token::Pound => true,
+            _ => t.is_metavar_expr(),
+        };
+        self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr)
+    }
+
+    /// Recover on `not expr` in favor of `!expr`.
+    fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
+        let negated_token = self.look_ahead(1, |t| *t);
+
+        let sub_diag = if negated_token.is_numeric_lit() {
+            errors::NotAsNegationOperatorSub::SuggestNotBitwise
+        } else if negated_token.is_bool_lit() {
+            errors::NotAsNegationOperatorSub::SuggestNotLogical
+        } else {
+            errors::NotAsNegationOperatorSub::SuggestNotDefault
+        };
+
+        self.dcx().emit_err(errors::NotAsNegationOperator {
+            negated: negated_token.span,
+            negated_desc: super::token_descr(&negated_token),
+            // Span the `not` plus trailing whitespace to avoid
+            // trailing whitespace after the `!` in our suggestion
+            sub: sub_diag(
+                self.psess.source_map().span_until_non_whitespace(lo.to(negated_token.span)),
+            ),
+        });
+
+        self.parse_expr_unary(lo, UnOp::Not)
+    }
+
+    /// Returns the span of expr if it was not interpolated, or the span of the interpolated token.
+    fn interpolated_or_expr_span(&self, expr: &Expr) -> Span {
+        match self.prev_token.kind {
+            token::NtIdent(..) | token::NtLifetime(..) => self.prev_token.span,
+            token::CloseInvisible(InvisibleOrigin::MetaVar(_)) => {
+                // `expr.span` is the interpolated span, because invisible open
+                // and close delims both get marked with the same span, one
+                // that covers the entire thing between them. (See
+                // `rustc_expand::mbe::transcribe::transcribe`.)
+                self.prev_token.span
+            }
+            _ => expr.span,
+        }
+    }
+
+    fn parse_assoc_op_cast(
+        &mut self,
+        lhs: P<Expr>,
+        lhs_span: Span,
+        op_span: Span,
+        expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind,
+    ) -> PResult<'a, P<Expr>> {
+        let mk_expr = |this: &mut Self, lhs: P<Expr>, rhs: P<Ty>| {
+            this.mk_expr(this.mk_expr_sp(&lhs, lhs_span, op_span, rhs.span), expr_kind(lhs, rhs))
+        };
+
+        // Save the state of the parser before parsing type normally, in case there is a
+        // LessThan comparison after this cast.
+        let parser_snapshot_before_type = self.clone();
+        let cast_expr = match self.parse_as_cast_ty() {
+            Ok(rhs) => mk_expr(self, lhs, rhs),
+            Err(type_err) => {
+                if !self.may_recover() {
+                    return Err(type_err);
+                }
+
+                // Rewind to before attempting to parse the type with generics, to recover
+                // from situations like `x as usize < y` in which we first tried to parse
+                // `usize < y` as a type with generic arguments.
+                let parser_snapshot_after_type = mem::replace(self, parser_snapshot_before_type);
+
+                // Check for typo of `'a: loop { break 'a }` with a missing `'`.
+                match (&lhs.kind, &self.token.kind) {
+                    (
+                        // `foo: `
+                        ExprKind::Path(None, ast::Path { segments, .. }),
+                        token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No),
+                    ) if let [segment] = segments.as_slice() => {
+                        let snapshot = self.create_snapshot_for_diagnostic();
+                        let label = Label {
+                            ident: Ident::from_str_and_span(
+                                &format!("'{}", segment.ident),
+                                segment.ident.span,
+                            ),
+                        };
+                        match self.parse_expr_labeled(label, false) {
+                            Ok(expr) => {
+                                type_err.cancel();
+                                self.dcx().emit_err(errors::MalformedLoopLabel {
+                                    span: label.ident.span,
+                                    suggestion: label.ident.span.shrink_to_lo(),
+                                });
+                                return Ok(expr);
+                            }
+                            Err(err) => {
+                                err.cancel();
+                                self.restore_snapshot(snapshot);
+                            }
+                        }
+                    }
+                    _ => {}
+                }
+
+                match self.parse_path(PathStyle::Expr) {
+                    Ok(path) => {
+                        let span_after_type = parser_snapshot_after_type.token.span;
+                        let expr = mk_expr(
+                            self,
+                            lhs,
+                            self.mk_ty(path.span, TyKind::Path(None, path.clone())),
+                        );
+
+                        let args_span = self.look_ahead(1, |t| t.span).to(span_after_type);
+                        let suggestion = errors::ComparisonOrShiftInterpretedAsGenericSugg {
+                            left: expr.span.shrink_to_lo(),
+                            right: expr.span.shrink_to_hi(),
+                        };
+
+                        match self.token.kind {
+                            token::Lt => {
+                                self.dcx().emit_err(errors::ComparisonInterpretedAsGeneric {
+                                    comparison: self.token.span,
+                                    r#type: path,
+                                    args: args_span,
+                                    suggestion,
+                                })
+                            }
+                            token::Shl => self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
+                                shift: self.token.span,
+                                r#type: path,
+                                args: args_span,
+                                suggestion,
+                            }),
+                            _ => {
+                                // We can end up here even without `<` being the next token, for
+                                // example because `parse_ty_no_plus` returns `Err` on keywords,
+                                // but `parse_path` returns `Ok` on them due to error recovery.
+                                // Return original error and parser state.
+                                *self = parser_snapshot_after_type;
+                                return Err(type_err);
+                            }
+                        };
+
+                        // Successfully parsed the type path leaving a `<` yet to parse.
+                        type_err.cancel();
+
+                        // Keep `x as usize` as an expression in AST and continue parsing.
+                        expr
+                    }
+                    Err(path_err) => {
+                        // Couldn't parse as a path, return original error and parser state.
+                        path_err.cancel();
+                        *self = parser_snapshot_after_type;
+                        return Err(type_err);
+                    }
+                }
+            }
+        };
+
+        // Try to parse a postfix operator such as `.`, `?`, or index (`[]`)
+        // after a cast. If one is present, emit an error and then return a
+        // valid parse tree anyway; for example, `&x as T[0]` will be parsed as
+        // if it had been written `((&x) as T)[0]`.
+
+        let span = cast_expr.span;
+
+        let with_postfix = self.parse_expr_dot_or_call_with(AttrVec::new(), cast_expr, span)?;
+
+        // Check if an illegal postfix operator has been added after the cast.
+        // If the resulting expression is not a cast, it is an illegal postfix operator.
+        if !matches!(with_postfix.kind, ExprKind::Cast(_, _)) {
+            let msg = format!(
+                "cast cannot be followed by {}",
+                match with_postfix.kind {
+                    ExprKind::Index(..) => "indexing",
+                    ExprKind::Try(_) => "`?`",
+                    ExprKind::Field(_, _) => "a field access",
+                    ExprKind::MethodCall(_) => "a method call",
+                    ExprKind::Call(_, _) => "a function call",
+                    ExprKind::Await(_, _) => "`.await`",
+                    ExprKind::Use(_, _) => "`.use`",
+                    ExprKind::Yield(YieldKind::Postfix(_)) => "`.yield`",
+                    ExprKind::Match(_, _, MatchKind::Postfix) => "a postfix match",
+                    ExprKind::Err(_) => return Ok(with_postfix),
+                    _ => unreachable!(
+                        "did not expect {:?} as an illegal postfix operator following cast",
+                        with_postfix.kind
+                    ),
+                }
+            );
+            let mut err = self.dcx().struct_span_err(span, msg);
+
+            let suggest_parens = |err: &mut Diag<'_>| {
+                let suggestions = vec![
+                    (span.shrink_to_lo(), "(".to_string()),
+                    (span.shrink_to_hi(), ")".to_string()),
+                ];
+                err.multipart_suggestion(
+                    "try surrounding the expression in parentheses",
+                    suggestions,
+                    Applicability::MachineApplicable,
+                );
+            };
+
+            suggest_parens(&mut err);
+
+            err.emit();
+        };
+        Ok(with_postfix)
+    }
+
+    /// Parse `& mut? <expr>` or `& raw [ const | mut ] <expr>`.
+    fn parse_expr_borrow(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
+        self.expect_and()?;
+        let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon);
+        let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below.
+        let (borrow_kind, mutbl) = self.parse_borrow_modifiers();
+        let attrs = self.parse_outer_attributes()?;
+        let expr = if self.token.is_range_separator() {
+            self.parse_expr_prefix_range(attrs)
+        } else {
+            self.parse_expr_prefix(attrs)
+        }?;
+        let hi = self.interpolated_or_expr_span(&expr);
+        let span = lo.to(hi);
+        if let Some(lt) = lifetime {
+            self.error_remove_borrow_lifetime(span, lt.ident.span.until(expr.span));
+        }
+
+        // Add expected tokens if we parsed `&raw` as an expression.
+        // This will make sure we see "expected `const`, `mut`", and
+        // guides recovery in case we write `&raw expr`.
+        if borrow_kind == ast::BorrowKind::Ref
+            && mutbl == ast::Mutability::Not
+            && matches!(&expr.kind, ExprKind::Path(None, p) if *p == kw::Raw)
+        {
+            self.expected_token_types.insert(TokenType::KwMut);
+            self.expected_token_types.insert(TokenType::KwConst);
+        }
+
+        Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr)))
+    }
+
+    fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) {
+        self.dcx().emit_err(errors::LifetimeInBorrowExpression { span, lifetime_span: lt_span });
+    }
+
+    /// Parse `mut?` or `[ raw | pin ] [ const | mut ]`.
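+    /// For example: `mut` in `&mut x`, `raw const` in `&raw const x`, and
+    /// `pin mut` in `&pin mut x`.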
+    fn parse_borrow_modifiers(&mut self) -> (ast::BorrowKind, ast::Mutability) {
+        if self.check_keyword(exp!(Raw)) && self.look_ahead(1, Token::is_mutability) {
+            // `raw [ const | mut ]`.
+            let found_raw = self.eat_keyword(exp!(Raw));
+            assert!(found_raw);
+            let mutability = self.parse_const_or_mut().unwrap();
+            (ast::BorrowKind::Raw, mutability)
+        } else if let Some((ast::Pinnedness::Pinned, mutbl)) = self.parse_pin_and_mut() {
+            // `pin [ const | mut ]`.
+            // `pin` has been gated in `self.parse_pin_and_mut()` so we don't
+            // need to gate it here.
+            (ast::BorrowKind::Pin, mutbl)
+        } else {
+            // `mut?`
+            (ast::BorrowKind::Ref, self.parse_mutability())
+        }
+    }
+
+    /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
+    fn parse_expr_dot_or_call(&mut self, attrs: AttrWrapper) -> PResult<'a, P<Expr>> {
+        self.collect_tokens_for_expr(attrs, |this, attrs| {
+            let base = this.parse_expr_bottom()?;
+            let span = this.interpolated_or_expr_span(&base);
+            this.parse_expr_dot_or_call_with(attrs, base, span)
+        })
+    }
+
+    pub(super) fn parse_expr_dot_or_call_with(
+        &mut self,
+        mut attrs: ast::AttrVec,
+        mut e: P<Expr>,
+        lo: Span,
+    ) -> PResult<'a, P<Expr>> {
+        let mut res = ensure_sufficient_stack(|| {
+            loop {
+                let has_question =
+                    if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
+                        // We are using noexpect here because we don't expect a `?` directly after
+                        // a `return` which could be suggested otherwise.
+                        self.eat_noexpect(&token::Question)
+                    } else {
+                        self.eat(exp!(Question))
+                    };
+                if has_question {
+                    // `expr?`
+                    e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
+                    continue;
+                }
+                let has_dot = if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
+                    // We are using noexpect here because we don't expect a `.` directly after
+                    // a `return` which could be suggested otherwise.
+                    self.eat_noexpect(&token::Dot)
+                } else if self.token == TokenKind::RArrow && self.may_recover() {
+                    // Recovery for `expr->suffix`.
+                    self.bump();
+                    let span = self.prev_token.span;
+                    self.dcx().emit_err(errors::ExprRArrowCall { span });
+                    true
+                } else {
+                    self.eat(exp!(Dot))
+                };
+                if has_dot {
+                    // expr.f
+                    e = self.parse_dot_suffix_expr(lo, e)?;
+                    continue;
+                }
+                if self.expr_is_complete(&e) {
+                    return Ok(e);
+                }
+                e = match self.token.kind {
+                    token::OpenParen => self.parse_expr_fn_call(lo, e),
+                    token::OpenBracket => self.parse_expr_index(lo, e)?,
+                    _ => return Ok(e),
+                }
+            }
+        });
+
+        // Stitch the list of outer attributes onto the return value. A little
+        // bit ugly, but the best way given the current code structure.
+        if !attrs.is_empty()
+            && let Ok(expr) = &mut res
+        {
+            mem::swap(&mut expr.attrs, &mut attrs);
+            expr.attrs.extend(attrs)
+        }
+        res
+    }
+
+    pub(super) fn parse_dot_suffix_expr(
+        &mut self,
+        lo: Span,
+        base: P<Expr>,
+    ) -> PResult<'a, P<Expr>> {
+        // At this point we've consumed something like `expr.` and `self.token` holds the token
+        // after the dot.
+        match self.token.uninterpolate().kind {
+            token::Ident(..) => self.parse_dot_suffix(base, lo),
+            token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
+                let ident_span = self.token.span;
+                self.bump();
+                Ok(self.mk_expr_tuple_field_access(lo, ident_span, base, symbol, suffix))
+            }
+            token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => {
+                Ok(match self.break_up_float(symbol, self.token.span) {
+                    // 1e2
+                    DestructuredFloat::Single(sym, _sp) => {
+                        // `foo.1e2`: a single complete dot access, fully consumed. We end up with
+                        // the `1e2` token in `self.prev_token` and the following token in
+                        // `self.token`.
+                        let ident_span = self.token.span;
+                        self.bump();
+                        self.mk_expr_tuple_field_access(lo, ident_span, base, sym, suffix)
+                    }
+                    // 1.
+                    DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
+                        // `foo.1.`: a single complete dot access and the start of another.
+                        // We end up with the `sym` (`1`) token in `self.prev_token` and a dot in
+                        // `self.token`.
+                        assert!(suffix.is_none());
+                        self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
+                        self.bump_with((Token::new(token::Dot, dot_span), self.token_spacing));
+                        self.mk_expr_tuple_field_access(lo, ident_span, base, sym, None)
+                    }
+                    // 1.2 | 1.2e3
+                    DestructuredFloat::MiddleDot(
+                        sym1,
+                        ident1_span,
+                        _dot_span,
+                        sym2,
+                        ident2_span,
+                    ) => {
+                        // `foo.1.2` (or `foo.1.2e3`): two complete dot accesses. We end up with
+                        // the `sym2` (`2` or `2e3`) token in `self.prev_token` and the following
+                        // token in `self.token`.
+                        let next_token2 =
+                            Token::new(token::Ident(sym2, IdentIsRaw::No), ident2_span);
+                        self.bump_with((next_token2, self.token_spacing));
+                        self.bump();
+                        let base1 =
+                            self.mk_expr_tuple_field_access(lo, ident1_span, base, sym1, None);
+                        self.mk_expr_tuple_field_access(lo, ident2_span, base1, sym2, suffix)
+                    }
+                    DestructuredFloat::Error => base,
+                })
+            }
+            _ => {
+                self.error_unexpected_after_dot();
+                Ok(base)
+            }
+        }
+    }
+
+    fn error_unexpected_after_dot(&self) {
+        let actual = super::token_descr(&self.token);
+        let span = self.token.span;
+        let sm = self.psess.source_map();
+        let (span, actual) = match (&self.token.kind, self.subparser_name) {
+            (token::Eof, Some(_)) if let Ok(snippet) = sm.span_to_snippet(sm.next_point(span)) => {
+                (span.shrink_to_hi(), format!("`{}`", snippet))
+            }
+            (token::CloseInvisible(InvisibleOrigin::MetaVar(_)), _) => {
+                // No need to report an error. This case will only occur when parsing a pasted
+                // metavariable, and we should have emitted an error when parsing the macro call in
+                // the first place. E.g. in this code:
+                // ```
+                // macro_rules! m { ($e:expr) => { $e }; }
+                //
+                // fn main() {
+                //     let f = 1;
+                //     m!(f.);
+                // }
+                // ```
+                // we'll get an error "unexpected token: `)` when parsing the `m!(f.)`, so we don't
+                // want to issue a second error when parsing the expansion `«f.»` (where `«`/`»`
+                // represent the invisible delimiters).
+                self.dcx().span_delayed_bug(span, "bad dot expr in metavariable");
+                return;
+            }
+            _ => (span, actual),
+        };
+        self.dcx().emit_err(errors::UnexpectedTokenAfterDot { span, actual });
+    }
+
+    /// We need an identifier or integer, but the next token is a float.
+    /// Break the float into components to extract the identifier or integer.
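+    /// For example, in `tup.1.0` the lexer produces a single float literal `1.0`,
+    /// which is split back here into the two tuple indices `1` and `0`.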
+    ///
+    /// See also [`TokenKind::break_two_token_op`] which does similar splitting of `>>` into `>`.
+    //
+    // FIXME: With current `TokenCursor` it's hard to break tokens into more than 2
+    //  parts unless those parts are processed immediately. `TokenCursor` should either
+    //  support pushing "future tokens" (would be also helpful to `break_and_eat`), or
+    //  we should break everything including floats into more basic proc-macro style
+    //  tokens in the lexer (probably preferable).
+    pub(super) fn break_up_float(&self, float: Symbol, span: Span) -> DestructuredFloat {
+        #[derive(Debug)]
+        enum FloatComponent {
+            IdentLike(String),
+            Punct(char),
+        }
+        use FloatComponent::*;
+
+        let float_str = float.as_str();
+        let mut components = Vec::new();
+        let mut ident_like = String::new();
+        for c in float_str.chars() {
+            if c == '_' || c.is_ascii_alphanumeric() {
+                ident_like.push(c);
+            } else if matches!(c, '.' | '+' | '-') {
+                if !ident_like.is_empty() {
+                    components.push(IdentLike(mem::take(&mut ident_like)));
+                }
+                components.push(Punct(c));
+            } else {
+                panic!("unexpected character in a float token: {c:?}")
+            }
+        }
+        if !ident_like.is_empty() {
+            components.push(IdentLike(ident_like));
+        }
+
+        // With proc macros the span can refer to anything, the source may be too short,
+        // or too long, or non-ASCII. It only makes sense to break our span into components
+        // if its underlying text is identical to our float literal.
+        let can_take_span_apart =
+            || self.span_to_snippet(span).as_deref() == Ok(float_str).as_deref();
+
+        match &*components {
+            // 1e2
+            [IdentLike(i)] => {
+                DestructuredFloat::Single(Symbol::intern(i), span)
+            }
+            // 1.
+            [IdentLike(left), Punct('.')] => {
+                let (left_span, dot_span) = if can_take_span_apart() {
+                    let left_span = span.with_hi(span.lo() + BytePos::from_usize(left.len()));
+                    let dot_span = span.with_lo(left_span.hi());
+                    (left_span, dot_span)
+                } else {
+                    (span, span)
+                };
+                let left = Symbol::intern(left);
+                DestructuredFloat::TrailingDot(left, left_span, dot_span)
+            }
+            // 1.2 | 1.2e3
+            [IdentLike(left), Punct('.'), IdentLike(right)] => {
+                let (left_span, dot_span, right_span) = if can_take_span_apart() {
+                    let left_span = span.with_hi(span.lo() + BytePos::from_usize(left.len()));
+                    let dot_span = span.with_lo(left_span.hi()).with_hi(left_span.hi() + BytePos(1));
+                    let right_span = span.with_lo(dot_span.hi());
+                    (left_span, dot_span, right_span)
+                } else {
+                    (span, span, span)
+                };
+                let left = Symbol::intern(left);
+                let right = Symbol::intern(right);
+                DestructuredFloat::MiddleDot(left, left_span, dot_span, right, right_span)
+            }
+            // 1e+ | 1e- (recovered)
+            [IdentLike(_), Punct('+' | '-')] |
+            // 1e+2 | 1e-2
+            [IdentLike(_), Punct('+' | '-'), IdentLike(_)] |
+            // 1.2e+ | 1.2e-
+            [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-')] |
+            // 1.2e+3 | 1.2e-3
+            [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-'), IdentLike(_)] => {
+                // See the FIXME about `TokenCursor` above.
+                self.error_unexpected_after_dot();
+                DestructuredFloat::Error
+            }
+            _ => panic!("unexpected components in a float token: {components:?}"),
+        }
+    }
+
+    /// Parse the field access used in offset_of, matched by `$(e:expr)+`.
+    /// Currently returns a list of idents. However, it should be possible in
+    /// the future to also support array indices, which might be arbitrary expressions.
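+    /// For example, the field path `a.b.0` in `offset_of!(S, a.b.0)` is parsed
+    /// here into the idents `a`, `b`, and `0`.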
+    fn parse_floating_field_access(&mut self) -> PResult<'a, Vec<Ident>> {
+        let mut fields = Vec::new();
+        let mut trailing_dot = None;
+
+        loop {
+            // This is expected to use a metavariable $(args:expr)+, but the builtin syntax
+            // could be called directly. Calling `parse_expr` allows this function to only
+            // consider `Expr`s.
+            let expr = self.parse_expr()?;
+            let mut current = &expr;
+            let start_idx = fields.len();
+            loop {
+                match current.kind {
+                    ExprKind::Field(ref left, right) => {
+                        // Field access is read right-to-left.
+                        fields.insert(start_idx, right);
+                        trailing_dot = None;
+                        current = left;
+                    }
+                    // Parse this both to give helpful error messages and to
+                    // verify it can be done with this parser setup.
+                    ExprKind::Index(ref left, ref _right, span) => {
+                        self.dcx().emit_err(errors::ArrayIndexInOffsetOf(span));
+                        current = left;
+                    }
+                    ExprKind::Lit(token::Lit {
+                        kind: token::Float | token::Integer,
+                        symbol,
+                        suffix,
+                    }) => {
+                        if let Some(suffix) = suffix {
+                            self.expect_no_tuple_index_suffix(current.span, suffix);
+                        }
+                        match self.break_up_float(symbol, current.span) {
+                            // 1e2
+                            DestructuredFloat::Single(sym, sp) => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, Ident::new(sym, sp));
+                            }
+                            // 1.
+                            DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => {
+                                assert!(suffix.is_none());
+                                trailing_dot = Some(dot_span);
+                                fields.insert(start_idx, Ident::new(sym, sym_span));
+                            }
+                            // 1.2 | 1.2e3
+                            DestructuredFloat::MiddleDot(
+                                symbol1,
+                                span1,
+                                _dot_span,
+                                symbol2,
+                                span2,
+                            ) => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, Ident::new(symbol2, span2));
+                                fields.insert(start_idx, Ident::new(symbol1, span1));
+                            }
+                            DestructuredFloat::Error => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, Ident::new(symbol, self.prev_token.span));
+                            }
+                        }
+                        break;
+                    }
+                    ExprKind::Path(None, Path { ref segments, .. }) => {
+                        match &segments[..] {
+                            [PathSegment { ident, args: None, .. }] => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, *ident)
+                            }
+                            _ => {
+                                self.dcx().emit_err(errors::InvalidOffsetOf(current.span));
+                                break;
+                            }
+                        }
+                        break;
+                    }
+                    _ => {
+                        self.dcx().emit_err(errors::InvalidOffsetOf(current.span));
+                        break;
+                    }
+                }
+            }
+
+            if self.token.kind.close_delim().is_some() || self.token.kind == token::Comma {
+                break;
+            } else if trailing_dot.is_none() {
+                // This loop should only repeat if there is a trailing dot.
+                self.dcx().emit_err(errors::InvalidOffsetOf(self.token.span));
+                break;
+            }
+        }
+        if let Some(dot) = trailing_dot {
+            self.dcx().emit_err(errors::InvalidOffsetOf(dot));
+        }
+        Ok(fields.into_iter().collect())
+    }
+
+    fn mk_expr_tuple_field_access(
+        &self,
+        lo: Span,
+        ident_span: Span,
+        base: P<Expr>,
+        field: Symbol,
+        suffix: Option<Symbol>,
+    ) -> P<Expr> {
+        if let Some(suffix) = suffix {
+            self.expect_no_tuple_index_suffix(ident_span, suffix);
+        }
+        self.mk_expr(lo.to(ident_span), ExprKind::Field(base, Ident::new(field, ident_span)))
+    }
+
+    /// Parse a function call expression, `expr(...)`.
+    fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
+        let snapshot = if self.token == token::OpenParen {
+            Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
+        } else {
+            None
+        };
+        let open_paren = self.token.span;
+
+        let seq = self
+            .parse_expr_paren_seq()
+            .map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args)));
+        match self.maybe_recover_struct_lit_bad_delims(lo, open_paren, seq, snapshot) {
+            Ok(expr) => expr,
+            Err(err) => self.recover_seq_parse_error(exp!(OpenParen), exp!(CloseParen), lo, err),
+        }
+    }
+
+    /// If we encounter a parser state that looks like the user has written a `struct` literal with
+    /// parentheses instead of braces, recover the parser state and provide suggestions.
+    #[instrument(skip(self, seq, snapshot), level = "trace")]
+    fn maybe_recover_struct_lit_bad_delims(
+        &mut self,
+        lo: Span,
+        open_paren: Span,
+        seq: PResult<'a, P<Expr>>,
+        snapshot: Option<(SnapshotParser<'a>, ExprKind)>,
+    ) -> PResult<'a, P<Expr>> {
+        match (self.may_recover(), seq, snapshot) {
+            (true, Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
+                snapshot.bump(); // `(`
+                match snapshot.parse_struct_fields(path.clone(), false, exp!(CloseParen)) {
+                    Ok((fields, ..)) if snapshot.eat(exp!(CloseParen)) => {
+                        // We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest
+                        // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
+                        self.restore_snapshot(snapshot);
+                        let close_paren = self.prev_token.span;
+                        let span = lo.to(close_paren);
+                        // filter shorthand fields
+                        let fields: Vec<_> =
+                            fields.into_iter().filter(|field| !field.is_shorthand).collect();
+
+                        let guar = if !fields.is_empty() &&
+                            // `token.kind` should not be compared here: in
+                            // `TokenTreesReader::parse_token_tree`, `snapshot.token.kind` is
+                            // treated as the same as that of the open delim even if they are
+                            // different, so we check the closing token's source snippet instead.
+                            self.span_to_snippet(close_paren).is_ok_and(|snippet| snippet == ")")
+                        {
+                            err.cancel();
+                            self.dcx()
+                                .create_err(errors::ParenthesesWithStructFields {
+                                    span,
+                                    r#type: path,
+                                    braces_for_struct: errors::BracesForStructLiteral {
+                                        first: open_paren,
+                                        second: close_paren,
+                                    },
+                                    no_fields_for_fn: errors::NoFieldsForFnCall {
+                                        fields: fields
+                                            .into_iter()
+                                            .map(|field| field.span.until(field.expr.span))
+                                            .collect(),
+                                    },
+                                })
+                                .emit()
+                        } else {
+                            err.emit()
+                        };
+                        Ok(self.mk_expr_err(span, guar))
+                    }
+                    Ok(_) => Err(err),
+                    Err(err2) => {
+                        err2.cancel();
+                        Err(err)
+                    }
+                }
+            }
+            (_, seq, _) => seq,
+        }
+    }
+
+    /// Parse an indexing expression `expr[...]`.
+    fn parse_expr_index(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
+        let prev_span = self.prev_token.span;
+        let open_delim_span = self.token.span;
+        self.bump(); // `[`
+        let index = self.parse_expr()?;
+        self.suggest_missing_semicolon_before_array(prev_span, open_delim_span)?;
+        self.expect(exp!(CloseBracket))?;
+        Ok(self.mk_expr(
+            lo.to(self.prev_token.span),
+            self.mk_index(base, index, open_delim_span.to(self.prev_token.span)),
+        ))
+    }
+
+    /// Assuming we have just parsed `.`, continue parsing into an expression.
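+    /// This covers postfix `expr.await`, the feature-gated `expr.use`, `expr.match { .. }` and
+    /// `expr.yield` forms, method calls like `expr.f(args)`, and field accesses like `expr.f`.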
+    fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
+        if self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) {
+            return Ok(self.mk_await_expr(self_arg, lo));
+        }
+
+        if self.eat_keyword(exp!(Use)) {
+            let use_span = self.prev_token.span;
+            self.psess.gated_spans.gate(sym::ergonomic_clones, use_span);
+            return Ok(self.mk_use_expr(self_arg, lo));
+        }
+
+        // Post-fix match
+        if self.eat_keyword(exp!(Match)) {
+            let match_span = self.prev_token.span;
+            self.psess.gated_spans.gate(sym::postfix_match, match_span);
+            return self.parse_match_block(lo, match_span, self_arg, MatchKind::Postfix);
+        }
+
+        // Parse a postfix `yield`.
+        if self.eat_keyword(exp!(Yield)) {
+            let yield_span = self.prev_token.span;
+            self.psess.gated_spans.gate(sym::yield_expr, yield_span);
+            return Ok(
+                self.mk_expr(lo.to(yield_span), ExprKind::Yield(YieldKind::Postfix(self_arg)))
+            );
+        }
+
+        let fn_span_lo = self.token.span;
+        let mut seg = self.parse_path_segment(PathStyle::Expr, None)?;
+        self.check_trailing_angle_brackets(&seg, &[exp!(OpenParen)]);
+        self.check_turbofish_missing_angle_brackets(&mut seg);
+
+        if self.check(exp!(OpenParen)) {
+            // Method call `expr.f()`
+            let args = self.parse_expr_paren_seq()?;
+            let fn_span = fn_span_lo.to(self.prev_token.span);
+            let span = lo.to(self.prev_token.span);
+            Ok(self.mk_expr(
+                span,
+                ExprKind::MethodCall(Box::new(ast::MethodCall {
+                    seg,
+                    receiver: self_arg,
+                    args,
+                    span: fn_span,
+                })),
+            ))
+        } else {
+            // Field access `expr.f`
+            let span = lo.to(self.prev_token.span);
+            if let Some(args) = seg.args {
+                // See `StashKey::GenericInFieldExpr` for more info on why we stash this.
+                self.dcx()
+                    .create_err(errors::FieldExpressionWithGeneric(args.span()))
+                    .stash(seg.ident.span, StashKey::GenericInFieldExpr);
+            }
+
+            Ok(self.mk_expr(span, ExprKind::Field(self_arg, seg.ident)))
+        }
+    }
+
+    /// At the bottom (top?) of the precedence hierarchy, this parses things like
+    /// parenthesized exprs, macros, `return`, etc.
+    ///
+    /// N.B., this does not parse outer attributes, and is private because it only works
+    /// correctly if called from `parse_expr_dot_or_call`.
+    fn parse_expr_bottom(&mut self) -> PResult<'a, P<Expr>> {
+        maybe_recover_from_interpolated_ty_qpath!(self, true);
+
+        let span = self.token.span;
+        if let Some(expr) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }),
+            |this| {
+                // Force collection (as opposed to just `parse_expr`) is required to avoid the
+                // attribute duplication seen in #138478.
+                let expr = this.parse_expr_force_collect();
+                // FIXME(nnethercote) Sometimes with expressions we get a trailing comma, possibly
+                // related to the FIXME in `collect_tokens_for_expr`. Examples are the multi-line
+                // `assert_eq!` calls involving arguments annotated with `#[rustfmt::skip]` in
+                // `compiler/rustc_index/src/bit_set/tests.rs`.
+                if this.token.kind == token::Comma {
+                    this.bump();
+                }
+                expr
+            },
+        ) {
+            return Ok(expr);
+        } else if let Some(lit) =
+            self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
+        {
+            return Ok(lit);
+        } else if let Some(block) =
+            self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block())
+        {
+            return Ok(self.mk_expr(span, ExprKind::Block(block, None)));
+        } else if let Some(path) =
+            self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type))
+        {
+            return Ok(self.mk_expr(span, ExprKind::Path(None, path)));
+        }
+
+        // Outer attributes are already parsed and will be
+        // added to the return value after the fact.
+
+        let restrictions = self.restrictions;
+        self.with_res(restrictions - Restrictions::ALLOW_LET, |this| {
+            // Note: adding new syntax here? Don't forget to adjust `TokenKind::can_begin_expr()`.
+            let lo = this.token.span;
+            if let token::Literal(_) = this.token.kind {
+                // This match arm is a special-case of the `_` match arm below and
+                // could be removed without changing functionality, but it's faster
+                // to have it here, especially for programs with large constants.
+                this.parse_expr_lit()
+            } else if this.check(exp!(OpenParen)) {
+                this.parse_expr_tuple_parens(restrictions)
+            } else if this.check(exp!(OpenBrace)) {
+                this.parse_expr_block(None, lo, BlockCheckMode::Default)
+            } else if this.check(exp!(Or)) || this.check(exp!(OrOr)) {
+                this.parse_expr_closure().map_err(|mut err| {
+                    // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
+                    // then suggest parens around the lhs.
+                    if let Some(sp) = this.psess.ambiguous_block_expr_parse.borrow().get(&lo) {
+                        err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
+                    }
+                    err
+                })
+            } else if this.check(exp!(OpenBracket)) {
+                this.parse_expr_array_or_repeat(exp!(CloseBracket))
+            } else if this.is_builtin() {
+                this.parse_expr_builtin()
+            } else if this.check_path() {
+                this.parse_expr_path_start()
+            } else if this.check_keyword(exp!(Move))
+                || this.check_keyword(exp!(Use))
+                || this.check_keyword(exp!(Static))
+                || this.check_const_closure()
+            {
+                this.parse_expr_closure()
+            } else if this.eat_keyword(exp!(If)) {
+                this.parse_expr_if()
+            } else if this.check_keyword(exp!(For)) {
+                if this.choose_generics_over_qpath(1) {
+                    this.parse_expr_closure()
+                } else {
+                    assert!(this.eat_keyword(exp!(For)));
+                    this.parse_expr_for(None, lo)
+                }
+            } else if this.eat_keyword(exp!(While)) {
+                this.parse_expr_while(None, lo)
+            } else if let Some(label) = this.eat_label() {
+                this.parse_expr_labeled(label, true)
+            } else if this.eat_keyword(exp!(Loop)) {
+                this.parse_expr_loop(None, lo).map_err(|mut err| {
+                    err.span_label(lo, "while parsing this `loop` expression");
+                    err
+                })
+            } else if this.eat_keyword(exp!(Match)) {
+                this.parse_expr_match().map_err(|mut err| {
+                    err.span_label(lo, "while parsing this `match` expression");
+                    err
+                })
+            } else if this.eat_keyword(exp!(Unsafe)) {
+                this.parse_expr_block(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err(
+                    |mut err| {
+                        err.span_label(lo, "while parsing this `unsafe` expression");
+                        err
+                    },
+                )
+            } else if this.check_inline_const(0) {
+                this.parse_const_block(lo, false)
+            } else if this.may_recover() && this.is_do_catch_block() {
+                this.recover_do_catch()
+            } else if this.is_try_block() {
+                this.expect_keyword(exp!(Try))?;
+                this.parse_try_block(lo)
+            } else if this.eat_keyword(exp!(Return)) {
+                this.parse_expr_return()
+            } else if this.eat_keyword(exp!(Continue)) {
+                this.parse_expr_continue(lo)
+            } else if this.eat_keyword(exp!(Break)) {
+                this.parse_expr_break()
+            } else if this.eat_keyword(exp!(Yield)) {
+                this.parse_expr_yield()
+            } else if this.is_do_yeet() {
+                this.parse_expr_yeet()
+            } else if this.eat_keyword(exp!(Become)) {
+                this.parse_expr_become()
+            } else if this.check_keyword(exp!(Let)) {
+                this.parse_expr_let(restrictions)
+            } else if this.eat_keyword(exp!(Underscore)) {
+                Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore))
+            } else if this.token_uninterpolated_span().at_least_rust_2018() {
+                // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
+                let at_async = this.check_keyword(exp!(Async));
+                // check for `gen {}` and `gen move {}`
+                // or `async gen {}` and `async gen move {}`
+                // FIXME: (async) gen closures aren't yet parsed.
+                // FIXME(gen_blocks): Parse `gen async` and suggest swap
+                if this.token_uninterpolated_span().at_least_rust_2024()
+                    && this.is_gen_block(kw::Gen, at_async as usize)
+                {
+                    this.parse_gen_block()
+                // Check for `async {` and `async move {`,
+                } else if this.is_gen_block(kw::Async, 0) {
+                    this.parse_gen_block()
+                } else if at_async {
+                    this.parse_expr_closure()
+                } else if this.eat_keyword_noexpect(kw::Await) {
+                    this.recover_incorrect_await_syntax(lo)
+                } else {
+                    this.parse_expr_lit()
+                }
+            } else {
+                this.parse_expr_lit()
+            }
+        })
+    }
+
+    fn parse_expr_lit(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.token.span;
+        match self.parse_opt_token_lit() {
+            Some((token_lit, _)) => {
+                let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(token_lit));
+                self.maybe_recover_from_bad_qpath(expr)
+            }
+            None => self.try_macro_suggestion(),
+        }
+    }
+
+    fn parse_expr_tuple_parens(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
+        let lo = self.token.span;
+        self.expect(exp!(OpenParen))?;
+        let (es, trailing_comma) = match self.parse_seq_to_end(
+            exp!(CloseParen),
+            SeqSep::trailing_allowed(exp!(Comma)),
+            |p| p.parse_expr_catch_underscore(restrictions.intersection(Restrictions::ALLOW_LET)),
+        ) {
+            Ok(x) => x,
+            Err(err) => {
+                return Ok(self.recover_seq_parse_error(
+                    exp!(OpenParen),
+                    exp!(CloseParen),
+                    lo,
+                    err,
+                ));
+            }
+        };
+        let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) {
+            // `(e)` is parenthesized `e`.
+            ExprKind::Paren(es.into_iter().next().unwrap())
+        } else {
+            // `(e,)` is a tuple with a single field `e`; longer comma-separated lists are
+            // ordinary tuples.
+            ExprKind::Tup(es)
+        };
+        let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair<'_>) -> PResult<'a, P<Expr>> {
+        let lo = self.token.span;
+        self.bump(); // `[` or other open delim
+
+        let kind = if self.eat(close) {
+            // Empty vector
+            ExprKind::Array(ThinVec::new())
+        } else {
+            // Non-empty vector
+            let first_expr = self.parse_expr()?;
+            if self.eat(exp!(Semi)) {
+                // Repeating array syntax: `[ 0; 512 ]`
+                let count = self.parse_expr_anon_const()?;
+                self.expect(close)?;
+                ExprKind::Repeat(first_expr, count)
+            } else if self.eat(exp!(Comma)) {
+                // Vector with two or more elements.
+                let sep = SeqSep::trailing_allowed(exp!(Comma));
+                let (mut exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
+                exprs.insert(0, first_expr);
+                ExprKind::Array(exprs)
+            } else {
+                // Vector with one element
+                self.expect(close)?;
+                ExprKind::Array(thin_vec![first_expr])
+            }
+        };
+        let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
+        let maybe_eq_tok = self.prev_token;
+        let (qself, path) = if self.eat_lt() {
+            let lt_span = self.prev_token.span;
+            let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| {
+                // Suggests using '<=' if there is an error parsing qpath when the previous token
+                // is an '=' token. Only emits suggestion if the '<' token and '=' token are
+                // directly adjacent (i.e. '=<')
+                if maybe_eq_tok == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() {
+                    let eq_lt = maybe_eq_tok.span.to(lt_span);
+                    err.span_suggestion(eq_lt, "did you mean", "<=", Applicability::Unspecified);
+                }
+                err
+            })?;
+            (Some(qself), path)
+        } else {
+            (None, self.parse_path(PathStyle::Expr)?)
+        };
+
+        // `!`, as an operator, is prefix, so we know this isn't that.
+        let (span, kind) = if self.eat(exp!(Bang)) {
+            // MACRO INVOCATION expression
+            if qself.is_some() {
+                self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
+            }
+            let lo = path.span;
+            let mac = P(MacCall { path, args: self.parse_delim_args()? });
+            (lo.to(self.prev_token.span), ExprKind::MacCall(mac))
+        } else if self.check(exp!(OpenBrace))
+            && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path)
+        {
+            if qself.is_some() {
+                self.psess.gated_spans.gate(sym::more_qualified_paths, path.span);
+            }
+            return expr;
+        } else {
+            (path.span, ExprKind::Path(qself, path))
+        };
+
+        let expr = self.mk_expr(span, kind);
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    /// Parse `'label: $expr`. The label is already parsed.
+    pub(super) fn parse_expr_labeled(
+        &mut self,
+        label_: Label,
+        mut consume_colon: bool,
+    ) -> PResult<'a, P<Expr>> {
+        let lo = label_.ident.span;
+        let label = Some(label_);
+        let ate_colon = self.eat(exp!(Colon));
+        let tok_sp = self.token.span;
+        let expr = if self.eat_keyword(exp!(While)) {
+            self.parse_expr_while(label, lo)
+        } else if self.eat_keyword(exp!(For)) {
+            self.parse_expr_for(label, lo)
+        } else if self.eat_keyword(exp!(Loop)) {
+            self.parse_expr_loop(label, lo)
+        } else if self.check_noexpect(&token::OpenBrace) || self.token.is_metavar_block() {
+            self.parse_expr_block(label, lo, BlockCheckMode::Default)
+        } else if !ate_colon
+            && self.may_recover()
+            && (self.token.kind.close_delim().is_some() || self.token.is_punct())
+            && could_be_unclosed_char_literal(label_.ident)
+        {
+            let (lit, _) =
+                self.recover_unclosed_char(label_.ident, Parser::mk_token_lit_char, |self_| {
+                    self_.dcx().create_err(errors::UnexpectedTokenAfterLabel {
+                        span: self_.token.span,
+                        remove_label: None,
+                        enclose_in_block: None,
+                    })
+                });
+            consume_colon = false;
+            Ok(self.mk_expr(lo, ExprKind::Lit(lit)))
+        } else if !ate_colon
+            && (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
+        {
+            // We're probably inside of a `Path<'a>` that needs a turbofish
+            let guar = self.dcx().emit_err(errors::UnexpectedTokenAfterLabel {
+                span: self.token.span,
+                remove_label: None,
+                enclose_in_block: None,
+            });
+            consume_colon = false;
+            Ok(self.mk_expr_err(lo, guar))
+        } else {
+            let mut err = errors::UnexpectedTokenAfterLabel {
+                span: self.token.span,
+                remove_label: None,
+                enclose_in_block: None,
+            };
+
+            // Continue as an expression in an effort to recover on `'label: non_block_expr`.
+            let expr = self.parse_expr().map(|expr| {
+                let span = expr.span;
+
+                let found_labeled_breaks = {
+                    struct FindLabeledBreaksVisitor;
+
+                    impl<'ast> Visitor<'ast> for FindLabeledBreaksVisitor {
+                        type Result = ControlFlow<()>;
+                        fn visit_expr(&mut self, ex: &'ast Expr) -> ControlFlow<()> {
+                            if let ExprKind::Break(Some(_label), _) = ex.kind {
+                                ControlFlow::Break(())
+                            } else {
+                                walk_expr(self, ex)
+                            }
+                        }
+                    }
+
+                    FindLabeledBreaksVisitor.visit_expr(&expr).is_break()
+                };
+
+                // Suggestion involves adding a labeled block.
+                //
+                // If there are no breaks that may use this label, suggest removing the label and
+                // recover to the unmodified expression.
+                if !found_labeled_breaks {
+                    err.remove_label = Some(lo.until(span));
+
+                    return expr;
+                }
+
+                err.enclose_in_block = Some(errors::UnexpectedTokenAfterLabelSugg {
+                    left: span.shrink_to_lo(),
+                    right: span.shrink_to_hi(),
+                });
+
+                // Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`.
+                let stmt = self.mk_stmt(span, StmtKind::Expr(expr));
+                let blk = self.mk_block(thin_vec![stmt], BlockCheckMode::Default, span);
+                self.mk_expr(span, ExprKind::Block(blk, label))
+            });
+
+            self.dcx().emit_err(err);
+            expr
+        }?;
+
+        if !ate_colon && consume_colon {
+            self.dcx().emit_err(errors::RequireColonAfterLabeledExpression {
+                span: expr.span,
+                label: lo,
+                label_end: lo.between(tok_sp),
+            });
+        }
+
+        Ok(expr)
+    }
+
+    /// Emit an error when a char is parsed as a lifetime or label because of a missing quote.
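+    /// For example, a stray `'a` written where the char literal `'a'` was intended is recovered
+    /// by suggesting the missing closing quote.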
+    pub(super) fn recover_unclosed_char<L>(
+        &self,
+        ident: Ident,
+        mk_lit_char: impl FnOnce(Symbol, Span) -> L,
+        err: impl FnOnce(&Self) -> Diag<'a>,
+    ) -> L {
+        assert!(could_be_unclosed_char_literal(ident));
+        self.dcx()
+            .try_steal_modify_and_emit_err(ident.span, StashKey::LifetimeIsChar, |err| {
+                err.span_suggestion_verbose(
+                    ident.span.shrink_to_hi(),
+                    "add `'` to close the char literal",
+                    "'",
+                    Applicability::MaybeIncorrect,
+                );
+            })
+            .unwrap_or_else(|| {
+                err(self)
+                    .with_span_suggestion_verbose(
+                        ident.span.shrink_to_hi(),
+                        "add `'` to close the char literal",
+                        "'",
+                        Applicability::MaybeIncorrect,
+                    )
+                    .emit()
+            });
+        let name = ident.without_first_quote().name;
+        mk_lit_char(name, ident.span)
+    }
+
+    /// Recover on the syntax `do catch { ... }` suggesting `try { ... }` instead.
+    fn recover_do_catch(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.token.span;
+
+        self.bump(); // `do`
+        self.bump(); // `catch`
+
+        let span = lo.to(self.prev_token.span);
+        self.dcx().emit_err(errors::DoCatchSyntaxRemoved { span });
+
+        self.parse_try_block(lo)
+    }
+
+    /// Parse an expression if the token can begin one.
+    fn parse_expr_opt(&mut self) -> PResult<'a, Option<P<Expr>>> {
+        Ok(if self.token.can_begin_expr() { Some(self.parse_expr()?) } else { None })
+    }
+
+    /// Parse `"return" expr?`.
+    fn parse_expr_return(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.prev_token.span;
+        let kind = ExprKind::Ret(self.parse_expr_opt()?);
+        let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    /// Parse `"do" "yeet" expr?`.
+    fn parse_expr_yeet(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.token.span;
+
+        self.bump(); // `do`
+        self.bump(); // `yeet`
+
+        let kind = ExprKind::Yeet(self.parse_expr_opt()?);
+
+        let span = lo.to(self.prev_token.span);
+        self.psess.gated_spans.gate(sym::yeet_expr, span);
+        let expr = self.mk_expr(span, kind);
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    /// Parse `"become" expr`, with `"become"` token already eaten.
+    fn parse_expr_become(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.prev_token.span;
+        let kind = ExprKind::Become(self.parse_expr()?);
+        let span = lo.to(self.prev_token.span);
+        self.psess.gated_spans.gate(sym::explicit_tail_calls, span);
+        let expr = self.mk_expr(span, kind);
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    /// Parse `"break" (('label (:? expr)?) | expr?)` with `"break"` token already eaten.
+    /// If the label is followed immediately by a `:` token, the label and `:` are
+    /// parsed as part of the expression (i.e. a labeled loop). The language team has
+    /// decided in #87026 to require parentheses as a visual aid to avoid confusion if
+    /// the break expression of an unlabeled break is a labeled loop (as in
+    /// `break 'lbl: loop {}`); a labeled break with an unlabeled loop as its value
+    /// expression only gets a warning for compatibility reasons; and a labeled break
+    /// with a labeled loop does not even get a warning because there is no ambiguity.
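+    /// For example, `break 'outer loop {}` (a labeled break whose value is an unlabeled loop)
+    /// is accepted but triggers the `break_with_label_and_loop` lint.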
+    fn parse_expr_break(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.prev_token.span;
+        let mut label = self.eat_label();
+        let kind = if self.token == token::Colon
+            && let Some(label) = label.take()
+        {
+            // The value expression can be a labeled loop, see issue #86948, e.g.:
+            // `loop { break 'label: loop { break 'label 42; }; }`
+            let lexpr = self.parse_expr_labeled(label, true)?;
+            self.dcx().emit_err(errors::LabeledLoopInBreak {
+                span: lexpr.span,
+                sub: errors::WrapInParentheses::Expression {
+                    left: lexpr.span.shrink_to_lo(),
+                    right: lexpr.span.shrink_to_hi(),
+                },
+            });
+            Some(lexpr)
+        } else if self.token != token::OpenBrace
+            || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
+        {
+            let mut expr = self.parse_expr_opt()?;
+            if let Some(expr) = &mut expr {
+                if label.is_some()
+                    && match &expr.kind {
+                        ExprKind::While(_, _, None)
+                        | ExprKind::ForLoop { label: None, .. }
+                        | ExprKind::Loop(_, None, _) => true,
+                        ExprKind::Block(block, None) => {
+                            matches!(block.rules, BlockCheckMode::Default)
+                        }
+                        _ => false,
+                    }
+                {
+                    self.psess.buffer_lint(
+                        BREAK_WITH_LABEL_AND_LOOP,
+                        lo.to(expr.span),
+                        ast::CRATE_NODE_ID,
+                        BuiltinLintDiag::BreakWithLabelAndLoop(expr.span),
+                    );
+                }
+
+                // Recover `break label aaaaa`
+                if self.may_recover()
+                    && let ExprKind::Path(None, p) = &expr.kind
+                    && let [segment] = &*p.segments
+                    && let &ast::PathSegment { ident, args: None, .. } = segment
+                    && let Some(next) = self.parse_expr_opt()?
+                {
+                    label = Some(self.recover_ident_into_label(ident));
+                    *expr = next;
+                }
+            }
+
+            expr
+        } else {
+            None
+        };
+        let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Break(label, kind));
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    /// Parse `"continue" label?`.
+    fn parse_expr_continue(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
+        let mut label = self.eat_label();
+
+        // Recover `continue label` -> `continue 'label`
+        if self.may_recover()
+            && label.is_none()
+            && let Some((ident, _)) = self.token.ident()
+        {
+            self.bump();
+            label = Some(self.recover_ident_into_label(ident));
+        }
+
+        let kind = ExprKind::Continue(label);
+        Ok(self.mk_expr(lo.to(self.prev_token.span), kind))
+    }
+
+    /// Parse `"yield" expr?`.
+    fn parse_expr_yield(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.prev_token.span;
+        let kind = ExprKind::Yield(YieldKind::Prefix(self.parse_expr_opt()?));
+        let span = lo.to(self.prev_token.span);
+        self.psess.gated_spans.gate(sym::yield_expr, span);
+        let expr = self.mk_expr(span, kind);
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    /// Parse `builtin # ident(args,*)`.
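+    /// The recognized idents are `offset_of`, `type_ascribe`, `wrap_binder`, and `unwrap_binder`,
+    /// e.g. `builtin # offset_of(Struct, field)`.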
+    fn parse_expr_builtin(&mut self) -> PResult<'a, P<Expr>> {
+        self.parse_builtin(|this, lo, ident| {
+            Ok(match ident.name {
+                sym::offset_of => Some(this.parse_expr_offset_of(lo)?),
+                sym::type_ascribe => Some(this.parse_expr_type_ascribe(lo)?),
+                sym::wrap_binder => {
+                    Some(this.parse_expr_unsafe_binder_cast(lo, UnsafeBinderCastKind::Wrap)?)
+                }
+                sym::unwrap_binder => {
+                    Some(this.parse_expr_unsafe_binder_cast(lo, UnsafeBinderCastKind::Unwrap)?)
+                }
+                _ => None,
+            })
+        })
+    }
+
+    pub(crate) fn parse_builtin<T>(
+        &mut self,
+        parse: impl FnOnce(&mut Parser<'a>, Span, Ident) -> PResult<'a, Option<T>>,
+    ) -> PResult<'a, T> {
+        let lo = self.token.span;
+
+        self.bump(); // `builtin`
+        self.bump(); // `#`
+
+        let Some((ident, IdentIsRaw::No)) = self.token.ident() else {
+            let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span });
+            return Err(err);
+        };
+        self.psess.gated_spans.gate(sym::builtin_syntax, ident.span);
+        self.bump();
+
+        self.expect(exp!(OpenParen))?;
+        let ret = if let Some(res) = parse(self, lo, ident)? {
+            Ok(res)
+        } else {
+            let err = self.dcx().create_err(errors::UnknownBuiltinConstruct {
+                span: lo.to(ident.span),
+                name: ident,
+            });
+            return Err(err);
+        };
+        self.expect(exp!(CloseParen))?;
+
+        ret
+    }
+
+    /// Built-in macro for `offset_of!` expressions.
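+    /// Parses the arguments of e.g. `builtin # offset_of(Container, field.0.inner)`: a type,
+    /// a comma, and then a dot-separated sequence of field and variant names.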
+    pub(crate) fn parse_expr_offset_of(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
+        let container = self.parse_ty()?;
+        self.expect(exp!(Comma))?;
+
+        let fields = self.parse_floating_field_access()?;
+        let trailing_comma = self.eat_noexpect(&TokenKind::Comma);
+
+        if let Err(mut e) = self.expect_one_of(&[], &[exp!(CloseParen)]) {
+            if trailing_comma {
+                e.note("unexpected third argument to offset_of");
+            } else {
+                e.note("offset_of expects dot-separated field and variant names");
+            }
+            e.emit();
+        }
+
+        // Eat tokens until the macro call ends.
+        if self.may_recover() {
+            while !self.token.kind.is_close_delim_or_eof() {
+                self.bump();
+            }
+        }
+
+        let span = lo.to(self.token.span);
+        Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields)))
+    }
+
+    /// Built-in macro for type ascription expressions.
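+    /// Parses the arguments of e.g. `builtin # type_ascribe(expr, Ty)` into `ExprKind::Type`.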
+    pub(crate) fn parse_expr_type_ascribe(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
+        let expr = self.parse_expr()?;
+        self.expect(exp!(Comma))?;
+        let ty = self.parse_ty()?;
+        let span = lo.to(self.token.span);
+        Ok(self.mk_expr(span, ExprKind::Type(expr, ty)))
+    }
+
+    pub(crate) fn parse_expr_unsafe_binder_cast(
+        &mut self,
+        lo: Span,
+        kind: UnsafeBinderCastKind,
+    ) -> PResult<'a, P<Expr>> {
+        let expr = self.parse_expr()?;
+        let ty = if self.eat(exp!(Comma)) { Some(self.parse_ty()?) } else { None };
+        let span = lo.to(self.token.span);
+        Ok(self.mk_expr(span, ExprKind::UnsafeBinderCast(kind, expr, ty)))
+    }
+
+    /// Returns a string literal if the next token is a string literal.
+    /// On error, returns `Err(Some(lit))` if the next token is a literal of the wrong kind,
+    /// and `Err(None)` if the next token is not a literal at all.
+    pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<MetaItemLit>> {
+        match self.parse_opt_meta_item_lit() {
+            Some(lit) => match lit.kind {
+                ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
+                    style,
+                    symbol: lit.symbol,
+                    suffix: lit.suffix,
+                    span: lit.span,
+                    symbol_unescaped,
+                }),
+                _ => Err(Some(lit)),
+            },
+            None => Err(None),
+        }
+    }
+
+    pub(crate) fn mk_token_lit_char(name: Symbol, span: Span) -> (token::Lit, Span) {
+        (token::Lit { symbol: name, suffix: None, kind: token::Char }, span)
+    }
+
+    fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit {
+        ast::MetaItemLit {
+            symbol: name,
+            suffix: None,
+            kind: ast::LitKind::Char(name.as_str().chars().next().unwrap_or('_')),
+            span,
+        }
+    }
+
+    fn handle_missing_lit<L>(
+        &mut self,
+        mk_lit_char: impl FnOnce(Symbol, Span) -> L,
+    ) -> PResult<'a, L> {
+        let token = self.token;
+        let err = |self_: &Self| {
+            let msg = format!("unexpected token: {}", super::token_descr(&token));
+            self_.dcx().struct_span_err(token.span, msg)
+        };
+        // On an error path, eagerly consider a lifetime to be an unclosed character lit, if that
+        // makes sense.
+        if let Some((ident, IdentIsRaw::No)) = self.token.lifetime()
+            && could_be_unclosed_char_literal(ident)
+        {
+            let lt = self.expect_lifetime();
+            Ok(self.recover_unclosed_char(lt.ident, mk_lit_char, err))
+        } else {
+            Err(err(self))
+        }
+    }
+
+    pub(super) fn parse_token_lit(&mut self) -> PResult<'a, (token::Lit, Span)> {
+        self.parse_opt_token_lit()
+            .ok_or(())
+            .or_else(|()| self.handle_missing_lit(Parser::mk_token_lit_char))
+    }
+
+    pub(super) fn parse_meta_item_lit(&mut self) -> PResult<'a, MetaItemLit> {
+        self.parse_opt_meta_item_lit()
+            .ok_or(())
+            .or_else(|()| self.handle_missing_lit(Parser::mk_meta_item_lit_char))
+    }
+
+    fn recover_after_dot(&mut self) {
+        if self.token == token::Dot {
+            // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where a
+            // dot would follow an optional literal, so we do this unconditionally.
+            let recovered = self.look_ahead(1, |next_token| {
+                // If it's an integer that looks like a float, then recover as such.
+                //
+                // We will never encounter the exponent part of a floating
+                // point literal here, since there's no use of the exponent
+                // syntax that also constitutes a valid integer, so we need
+                // not check for that.
+                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
+                    next_token.kind
+                    && suffix.is_none_or(|s| s == sym::f32 || s == sym::f64)
+                    && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_')
+                    && self.token.span.hi() == next_token.span.lo()
+                {
+                    let s = String::from("0.") + symbol.as_str();
+                    let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
+                    Some(Token::new(kind, self.token.span.to(next_token.span)))
+                } else {
+                    None
+                }
+            });
+            if let Some(recovered) = recovered {
+                self.dcx().emit_err(errors::FloatLiteralRequiresIntegerPart {
+                    span: recovered.span,
+                    suggestion: recovered.span.shrink_to_lo(),
+                });
+                self.bump();
+                self.token = recovered;
+            }
+        }
+    }
+
+    /// Keep this in sync with `Token::can_begin_literal_maybe_minus` and
+    /// `Lit::from_token` (excluding unary negation).
+    fn eat_token_lit(&mut self) -> Option<token::Lit> {
+        let check_expr = |expr: P<Expr>| {
+            if let ast::ExprKind::Lit(token_lit) = expr.kind {
+                Some(token_lit)
+            } else if let ast::ExprKind::Unary(UnOp::Neg, inner) = &expr.kind
+                && let ast::Expr { kind: ast::ExprKind::Lit(_), .. } = **inner
+            {
+                None
+            } else {
+                panic!("unexpected reparsed expr/literal: {:?}", expr.kind);
+            }
+        };
+        match self.token.uninterpolate().kind {
+            token::Ident(name, IdentIsRaw::No) if name.is_bool_lit() => {
+                self.bump();
+                Some(token::Lit::new(token::Bool, name, None))
+            }
+            token::Literal(token_lit) => {
+                self.bump();
+                Some(token_lit)
+            }
+            token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Literal)) => {
+                let lit = self
+                    .eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
+                    .expect("metavar seq literal");
+                check_expr(lit)
+            }
+            token::OpenInvisible(InvisibleOrigin::MetaVar(
+                mv_kind @ MetaVarKind::Expr { can_begin_literal_maybe_minus: true, .. },
+            )) => {
+                let expr = self
+                    .eat_metavar_seq(mv_kind, |this| this.parse_expr())
+                    .expect("metavar seq expr");
+                check_expr(expr)
+            }
+            _ => None,
+        }
+    }
+
+    /// Matches `lit = true | false | token_lit`.
+    /// Returns `None` if the next token is not a literal.
+    fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
+        self.recover_after_dot();
+        let span = self.token.span;
+        self.eat_token_lit().map(|token_lit| (token_lit, span))
+    }
+
+    /// Matches `lit = true | false | token_lit`.
+    /// Returns `None` if the next token is not a literal.
+    fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
+        self.recover_after_dot();
+        let span = self.token.span;
+        let uninterpolated_span = self.token_uninterpolated_span();
+        self.eat_token_lit().map(|token_lit| {
+            match MetaItemLit::from_token_lit(token_lit, span) {
+                Ok(lit) => lit,
+                Err(err) => {
+                    let guar = report_lit_error(&self.psess, err, token_lit, uninterpolated_span);
+                    // Pack possible quotes and prefixes from the original literal into
+                    // the error literal's symbol so they can be pretty-printed faithfully.
+                    let suffixless_lit = token::Lit::new(token_lit.kind, token_lit.symbol, None);
+                    let symbol = Symbol::intern(&suffixless_lit.to_string());
+                    let token_lit = token::Lit::new(token::Err(guar), symbol, token_lit.suffix);
+                    MetaItemLit::from_token_lit(token_lit, uninterpolated_span).unwrap()
+                }
+            }
+        })
+    }
+
+    pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) {
+        if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
+            // #59553: warn instead of rejecting out of hand, to allow the fix to percolate
+            // through the ecosystem as people fix their macros.
+            self.dcx().emit_warn(errors::InvalidLiteralSuffixOnTupleIndex {
+                span,
+                suffix,
+                exception: true,
+            });
+        } else {
+            self.dcx().emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
+                span,
+                suffix,
+                exception: false,
+            });
+        }
+    }
+
+    /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
+    /// Keep this in sync with `Token::can_begin_literal_maybe_minus`.
+    pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
+        if let Some(expr) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }),
+            |this| {
+                // FIXME(nnethercote) The `expr` case should only match if
+                // `e` is an `ExprKind::Lit` or an `ExprKind::Unary` containing
+                // an `UnOp::Neg` and an `ExprKind::Lit`, like how
+                // `can_begin_literal_maybe_minus` works. But this method has
+                // been over-accepting for a long time, and to make that change
+                // here requires also changing some `parse_literal_maybe_minus`
+                // call sites to accept additional expression kinds. E.g.
+                // `ExprKind::Path` must be accepted when parsing range
+                // patterns. That requires some care. So for now, we continue
+                // being less strict here than we should be.
+                this.parse_expr()
+            },
+        ) {
+            return Ok(expr);
+        } else if let Some(lit) =
+            self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
+        {
+            return Ok(lit);
+        }
+
+        let lo = self.token.span;
+        let minus_present = self.eat(exp!(Minus));
+        let (token_lit, span) = self.parse_token_lit()?;
+        let expr = self.mk_expr(span, ExprKind::Lit(token_lit));
+
+        if minus_present {
+            Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
+        } else {
+            Ok(expr)
+        }
+    }
+
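+    /// Checks whether the upcoming tokens look like an array written with braces, e.g.
+    /// `{ 1, 2, 3 }`: an opening brace followed by an ident or literal, a comma, and the start
+    /// of another expression.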
+    fn is_array_like_block(&mut self) -> bool {
+        self.token.kind == TokenKind::OpenBrace
+            && self
+                .look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_)))
+            && self.look_ahead(2, |t| t == &token::Comma)
+            && self.look_ahead(3, |t| t.can_begin_expr())
+    }
+
+    /// Emits a suggestion if it looks like the user meant an array but
+    /// accidentally used braces, causing the code to be interpreted as a block
+    /// expression.
+    fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> {
+        let mut snapshot = self.create_snapshot_for_diagnostic();
+        match snapshot.parse_expr_array_or_repeat(exp!(CloseBrace)) {
+            Ok(arr) => {
+                let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfBraces {
+                    span: arr.span,
+                    sub: errors::ArrayBracketsInsteadOfBracesSugg {
+                        left: lo,
+                        right: snapshot.prev_token.span,
+                    },
+                });
+
+                self.restore_snapshot(snapshot);
+                Some(self.mk_expr_err(arr.span, guar))
+            }
+            Err(e) => {
+                e.cancel();
+                None
+            }
+        }
+    }
+
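+    /// When what follows an expression looks like a multi-line array literal (e.g. a stray
+    /// `[1, 2, 3]` starting on the next line), suggest inserting a missing `;` after the
+    /// preceding expression.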
+    fn suggest_missing_semicolon_before_array(
+        &self,
+        prev_span: Span,
+        open_delim_span: Span,
+    ) -> PResult<'a, ()> {
+        if !self.may_recover() {
+            return Ok(());
+        }
+
+        if self.token == token::Comma {
+            if !self.psess.source_map().is_multiline(prev_span.until(self.token.span)) {
+                return Ok(());
+            }
+            let mut snapshot = self.create_snapshot_for_diagnostic();
+            snapshot.bump();
+            match snapshot.parse_seq_to_before_end(
+                exp!(CloseBracket),
+                SeqSep::trailing_allowed(exp!(Comma)),
+                |p| p.parse_expr(),
+            ) {
+                Ok(_)
+                    // When the close delim is `)`, `token.kind` is expected to be `token::CloseParen`,
+                    // but the actual `token.kind` is `token::CloseBracket`.
+                    // This is because the `token.kind` of the close delim is treated as the same as
+                    // that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different.
+                    // Therefore, `token.kind` should not be compared here.
+                    if snapshot
+                        .span_to_snippet(snapshot.token.span)
+                        .is_ok_and(|snippet| snippet == "]") =>
+                {
+                    return Err(self.dcx().create_err(errors::MissingSemicolonBeforeArray {
+                        open_delim: open_delim_span,
+                        semicolon: prev_span.shrink_to_hi(),
+                    }));
+                }
+                Ok(_) => (),
+                Err(err) => err.cancel(),
+            }
+        }
+        Ok(())
+    }
+
+    /// Parses a block or unsafe block.
+    pub(super) fn parse_expr_block(
+        &mut self,
+        opt_label: Option<Label>,
+        lo: Span,
+        blk_mode: BlockCheckMode,
+    ) -> PResult<'a, P<Expr>> {
+        if self.may_recover() && self.is_array_like_block() {
+            if let Some(arr) = self.maybe_suggest_brackets_instead_of_braces(lo) {
+                return Ok(arr);
+            }
+        }
+
+        if self.token.is_metavar_block() {
+            self.dcx().emit_err(errors::InvalidBlockMacroSegment {
+                span: self.token.span,
+                context: lo.to(self.token.span),
+                wrap: errors::WrapInExplicitBlock {
+                    lo: self.token.span.shrink_to_lo(),
+                    hi: self.token.span.shrink_to_hi(),
+                },
+            });
+        }
+
+        let (attrs, blk) = self.parse_block_common(lo, blk_mode, None)?;
+        Ok(self.mk_expr_with_attrs(blk.span, ExprKind::Block(blk, opt_label), attrs))
+    }
+
+    /// Parse a block which takes no attributes and has no label
+    fn parse_simple_block(&mut self) -> PResult<'a, P<Expr>> {
+        let blk = self.parse_block()?;
+        Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None)))
+    }
+
+    /// Parses a closure expression (e.g., `move |args| expr`).
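+    /// Also handles an optional `for<'a>` lifetime binder, `const` and `static` qualifiers, and
+    /// `async`/`gen` coroutine markers before the parameter list.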
+    fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.token.span;
+
+        let before = self.prev_token;
+        let binder = if self.check_keyword(exp!(For)) {
+            let lo = self.token.span;
+            let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
+            let span = lo.to(self.prev_token.span);
+
+            self.psess.gated_spans.gate(sym::closure_lifetime_binder, span);
+
+            ClosureBinder::For { span, generic_params: lifetime_defs }
+        } else {
+            ClosureBinder::NotPresent
+        };
+
+        let constness = self.parse_closure_constness();
+
+        let movability =
+            if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable };
+
+        let coroutine_kind = if self.token_uninterpolated_span().at_least_rust_2018() {
+            self.parse_coroutine_kind(Case::Sensitive)
+        } else {
+            None
+        };
+
+        if let ClosureBinder::NotPresent = binder
+            && coroutine_kind.is_some()
+        {
+            // coroutine closures and generators can have the same qualifiers, so we might end up
+            // in here if there is a missing `|` but also no `{`. Adjust the expectations in that case.
+            self.expected_token_types.insert(TokenType::OpenBrace);
+        }
+
+        let capture_clause = self.parse_capture_clause()?;
+        let (fn_decl, fn_arg_span) = self.parse_fn_block_decl()?;
+        let decl_hi = self.prev_token.span;
+        let mut body = match &fn_decl.output {
+            // No return type.
+            FnRetTy::Default(_) => {
+                let restrictions =
+                    self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
+                let prev = self.prev_token;
+                let token = self.token;
+                let attrs = self.parse_outer_attributes()?;
+                match self.parse_expr_res(restrictions, attrs) {
+                    Ok((expr, _)) => expr,
+                    Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
+                }
+            }
+            // Explicit return type (`->`) needs block `-> T { }`.
+            FnRetTy::Ty(ty) => self.parse_closure_block_body(ty.span)?,
+        };
+
+        match coroutine_kind {
+            Some(CoroutineKind::Async { .. }) => {}
+            Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => {
+                // Feature-gate `gen ||` and `async gen ||` closures.
+                // FIXME(gen_blocks): This perhaps should be a different gate.
+                self.psess.gated_spans.gate(sym::gen_blocks, span);
+            }
+            None => {}
+        }
+
+        if self.token == TokenKind::Semi
+            && let Some(last) = self.token_cursor.stack.last()
+            && let Some(TokenTree::Delimited(_, _, Delimiter::Parenthesis, _)) = last.curr()
+            && self.may_recover()
+        {
+            // It is likely that the closure body is a block whose braces have been
+            // omitted. We will recover and consume the following statements later in
+            // the parsing process.
+            body = self.mk_expr_err(
+                body.span,
+                self.dcx().span_delayed_bug(body.span, "recovered a closure body as a block"),
+            );
+        }
+
+        let body_span = body.span;
+
+        let closure = self.mk_expr(
+            lo.to(body.span),
+            ExprKind::Closure(Box::new(ast::Closure {
+                binder,
+                capture_clause,
+                constness,
+                coroutine_kind,
+                movability,
+                fn_decl,
+                body,
+                fn_decl_span: lo.to(decl_hi),
+                fn_arg_span,
+            })),
+        );
+
+        // Disable recovery for closure body
+        let spans =
+            ClosureSpans { whole_closure: closure.span, closing_pipe: decl_hi, body: body_span };
+        self.current_closure = Some(spans);
+
+        Ok(closure)
+    }
+
+    /// If an explicit return type is given, require a block to appear (RFC 968).
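+    /// For example, `|x| -> u32 { x + 1 }` is accepted, while `|x| -> u32 x + 1` gets a
+    /// suggestion to wrap the body in braces.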
+    fn parse_closure_block_body(&mut self, ret_span: Span) -> PResult<'a, P<Expr>> {
+        if self.may_recover()
+            && self.token.can_begin_expr()
+            && self.token.kind != TokenKind::OpenBrace
+            && !self.token.is_metavar_block()
+        {
+            let snapshot = self.create_snapshot_for_diagnostic();
+            let restrictions =
+                self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
+            let tok = self.token.clone();
+            match self.parse_expr_res(restrictions, AttrWrapper::empty()) {
+                Ok((expr, _)) => {
+                    let descr = super::token_descr(&tok);
+                    let mut diag = self
+                        .dcx()
+                        .struct_span_err(tok.span, format!("expected `{{`, found {descr}"));
+                    diag.span_label(
+                        ret_span,
+                        "explicit return type requires closure body to be enclosed in braces",
+                    );
+                    diag.multipart_suggestion_verbose(
+                        "wrap the expression in curly braces",
+                        vec![
+                            (expr.span.shrink_to_lo(), "{ ".to_string()),
+                            (expr.span.shrink_to_hi(), " }".to_string()),
+                        ],
+                        Applicability::MachineApplicable,
+                    );
+                    diag.emit();
+                    return Ok(expr);
+                }
+                Err(diag) => {
+                    diag.cancel();
+                    self.restore_snapshot(snapshot);
+                }
+            }
+        }
+
+        let body_lo = self.token.span;
+        self.parse_expr_block(None, body_lo, BlockCheckMode::Default)
+    }
+
+    /// Parses an optional `move` or `use` prefix to a closure-like construct.
+    fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
+        if self.eat_keyword(exp!(Move)) {
+            let move_kw_span = self.prev_token.span;
+            // Check for `move async` and recover
+            if self.check_keyword(exp!(Async)) {
+                let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
+                Err(self
+                    .dcx()
+                    .create_err(errors::AsyncMoveOrderIncorrect { span: move_async_span }))
+            } else {
+                Ok(CaptureBy::Value { move_kw: move_kw_span })
+            }
+        } else if self.eat_keyword(exp!(Use)) {
+            let use_kw_span = self.prev_token.span;
+            self.psess.gated_spans.gate(sym::ergonomic_clones, use_kw_span);
+            // Check for `use async` and recover
+            if self.check_keyword(exp!(Async)) {
+                let use_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
+                Err(self.dcx().create_err(errors::AsyncUseOrderIncorrect { span: use_async_span }))
+            } else {
+                Ok(CaptureBy::Use { use_kw: use_kw_span })
+            }
+        } else {
+            Ok(CaptureBy::Ref)
+        }
+    }
+
+    /// Parses the `|arg, arg|` header of a closure.
+    fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> {
+        let arg_start = self.token.span.lo();
+
+        let inputs = if self.eat(exp!(OrOr)) {
+            ThinVec::new()
+        } else {
+            self.expect(exp!(Or))?;
+            let args = self
+                .parse_seq_to_before_tokens(
+                    &[exp!(Or)],
+                    &[&token::OrOr],
+                    SeqSep::trailing_allowed(exp!(Comma)),
+                    |p| p.parse_fn_block_param(),
+                )?
+                .0;
+            self.expect_or()?;
+            args
+        };
+        let arg_span = self.prev_token.span.with_lo(arg_start);
+        let output =
+            self.parse_ret_ty(AllowPlus::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes)?;
+
+        Ok((P(FnDecl { inputs, output }), arg_span))
+    }
+
+    /// Parses a parameter in a closure header (e.g., `|arg, arg|`).
+    fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
+        let lo = self.token.span;
+        let attrs = self.parse_outer_attributes()?;
+        self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+            let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
+            let ty = if this.eat(exp!(Colon)) {
+                this.parse_ty()?
+            } else {
+                this.mk_ty(pat.span, TyKind::Infer)
+            };
+
+            Ok((
+                Param {
+                    attrs,
+                    ty,
+                    pat,
+                    span: lo.to(this.prev_token.span),
+                    id: DUMMY_NODE_ID,
+                    is_placeholder: false,
+                },
+                Trailing::from(this.token == token::Comma),
+                UsePreAttrPos::No,
+            ))
+        })
+    }
+
+    /// Parses an `if` expression (`if` token already eaten).
+    fn parse_expr_if(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.prev_token.span;
+        // Scoping code checks the top level edition of the `if`; let's match it here.
+        // The `CondChecker` also checks the edition of the `let` itself, just to make sure.
+        let let_chains_policy = LetChainsPolicy::EditionDependent { current_edition: lo.edition() };
+        let cond = self.parse_expr_cond(let_chains_policy)?;
+        self.parse_if_after_cond(lo, cond)
+    }
+
+    fn parse_if_after_cond(&mut self, lo: Span, mut cond: P<Expr>) -> PResult<'a, P<Expr>> {
+        let cond_span = cond.span;
+        // Tries to interpret `cond` as either a missing expression if it's a block,
+        // or as an unfinished expression if it's a binop and the RHS is a block.
+        // We could probably add more recoveries here too...
+        let mut recover_block_from_condition = |this: &mut Self| {
+            let block = match &mut cond.kind {
+                ExprKind::Binary(Spanned { span: binop_span, .. }, _, right)
+                    if let ExprKind::Block(_, None) = right.kind =>
+                {
+                    let guar = this.dcx().emit_err(errors::IfExpressionMissingThenBlock {
+                        if_span: lo,
+                        missing_then_block_sub:
+                            errors::IfExpressionMissingThenBlockSub::UnfinishedCondition(
+                                cond_span.shrink_to_lo().to(*binop_span),
+                            ),
+                        let_else_sub: None,
+                    });
+                    std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi(), guar))
+                }
+                ExprKind::Block(_, None) => {
+                    let guar = this.dcx().emit_err(errors::IfExpressionMissingCondition {
+                        if_span: lo.with_neighbor(cond.span).shrink_to_hi(),
+                        block_span: self.psess.source_map().start_point(cond_span),
+                    });
+                    std::mem::replace(&mut cond, this.mk_expr_err(cond_span.shrink_to_hi(), guar))
+                }
+                _ => {
+                    return None;
+                }
+            };
+            if let ExprKind::Block(block, _) = &block.kind {
+                Some(block.clone())
+            } else {
+                unreachable!()
+            }
+        };
+        // Parse the `then` block.
+        let thn = if self.token.is_keyword(kw::Else) {
+            if let Some(block) = recover_block_from_condition(self) {
+                block
+            } else {
+                let let_else_sub = matches!(cond.kind, ExprKind::Let(..))
+                    .then(|| errors::IfExpressionLetSomeSub { if_span: lo.until(cond_span) });
+
+                let guar = self.dcx().emit_err(errors::IfExpressionMissingThenBlock {
+                    if_span: lo,
+                    missing_then_block_sub: errors::IfExpressionMissingThenBlockSub::AddThenBlock(
+                        cond_span.shrink_to_hi(),
+                    ),
+                    let_else_sub,
+                });
+                self.mk_block_err(cond_span.shrink_to_hi(), guar)
+            }
+        } else {
+            let attrs = self.parse_outer_attributes()?; // For recovery.
+            let maybe_fatarrow = self.token;
+            let block = if self.check(exp!(OpenBrace)) {
+                self.parse_block()?
+            } else if let Some(block) = recover_block_from_condition(self) {
+                block
+            } else {
+                self.error_on_extra_if(&cond)?;
+                // Parse block, which will always fail, but we can add a nice note to the error
+                self.parse_block().map_err(|mut err| {
+                        if self.prev_token == token::Semi
+                            && self.token == token::AndAnd
+                            && let maybe_let = self.look_ahead(1, |t| t.clone())
+                            && maybe_let.is_keyword(kw::Let)
+                        {
+                            err.span_suggestion(
+                                self.prev_token.span,
+                                "consider removing this semicolon to parse the `let` as part of the same chain",
+                                "",
+                                Applicability::MachineApplicable,
+                            ).span_note(
+                                self.token.span.to(maybe_let.span),
+                                "you likely meant to continue parsing the let-chain starting here",
+                            );
+                        } else {
+                            // Look for usages of '=>' where '>=' might be intended
+                            if maybe_fatarrow == token::FatArrow {
+                                err.span_suggestion(
+                                    maybe_fatarrow.span,
+                                    "you might have meant to write a \"greater than or equal to\" comparison",
+                                    ">=",
+                                    Applicability::MaybeIncorrect,
+                                );
+                            }
+                            err.span_note(
+                                cond_span,
+                                "the `if` expression is missing a block after this condition",
+                            );
+                        }
+                        err
+                    })?
+            };
+            self.error_on_if_block_attrs(lo, false, block.span, attrs);
+            block
+        };
+        let els = if self.eat_keyword(exp!(Else)) { Some(self.parse_expr_else()?) } else { None };
+        Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els)))
+    }
+
+    /// Parses the condition of an `if` or `while` expression.
+    ///
+    /// The specified `edition` in `let_chains_policy` should be that of the whole `if` construct,
+    /// i.e. the same span we use to later decide whether the drop behaviour should be that of
+    /// edition `..=2021` or that of `2024..`.
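+    ///
+    /// The condition is parsed with `Restrictions::NO_STRUCT_LITERAL`, so (illustrative):
+    ///
+    /// ```ignore (illustrative)
+    /// if Foo { x: 1 } == foo { ... }   // `Foo` ends the condition; no struct literal here
+    /// if (Foo { x: 1 }) == foo { ... } // parenthesizing allows the literal
+    /// ```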
+    // Public because it is used in rustfmt forks such as https://github.com/tucant/rustfmt/blob/30c83df9e1db10007bdd16dafce8a86b404329b2/src/parse/macros/html.rs#L57 for custom if expressions.
+    pub fn parse_expr_cond(&mut self, let_chains_policy: LetChainsPolicy) -> PResult<'a, P<Expr>> {
+        let attrs = self.parse_outer_attributes()?;
+        let (mut cond, _) =
+            self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
+
+        CondChecker::new(self, let_chains_policy).visit_expr(&mut cond);
+
+        Ok(cond)
+    }
+
+    /// Parses a `let $pat = $expr` pseudo-expression.
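+    ///
+    /// These are only valid in condition-like positions; elsewhere this emits
+    /// `ExpectedExpressionFoundLet` and recovers (illustrative):
+    ///
+    /// ```ignore (illustrative)
+    /// if let Some(x) = opt && x > 0 { ... }   // allowed
+    /// let _ = (let y = 1);                    // reported, then recovered
+    /// ```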
+    fn parse_expr_let(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
+        let recovered = if !restrictions.contains(Restrictions::ALLOW_LET) {
+            let err = errors::ExpectedExpressionFoundLet {
+                span: self.token.span,
+                reason: ForbiddenLetReason::OtherForbidden,
+                missing_let: None,
+                comparison: None,
+            };
+            if self.prev_token == token::Or {
+                // This was part of a closure; let that part of the parser recover.
+                return Err(self.dcx().create_err(err));
+            } else {
+                Recovered::Yes(self.dcx().emit_err(err))
+            }
+        } else {
+            Recovered::No
+        };
+        self.bump(); // Eat `let` token
+        let lo = self.prev_token.span;
+        let pat = self.parse_pat_no_top_guard(
+            None,
+            RecoverComma::Yes,
+            RecoverColon::Yes,
+            CommaRecoveryMode::LikelyTuple,
+        )?;
+        if self.token == token::EqEq {
+            self.dcx().emit_err(errors::ExpectedEqForLetExpr {
+                span: self.token.span,
+                sugg_span: self.token.span,
+            });
+            self.bump();
+        } else {
+            self.expect(exp!(Eq))?;
+        }
+        let attrs = self.parse_outer_attributes()?;
+        let (expr, _) =
+            self.parse_expr_assoc_with(Bound::Excluded(prec_let_scrutinee_needs_par()), attrs)?;
+        let span = lo.to(expr.span);
+        Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
+    }
+
+    /// Parses an `else { ... }` expression (`else` token already eaten).
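+    ///
+    /// Handles both `else { ... }` and `else if cond { ... }`, and attempts recovery when
+    /// the block or the `if` keyword is missing, e.g. (illustrative) `else x == y { ... }`
+    /// is diagnosed via `ExpectedElseBlock`.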
+    fn parse_expr_else(&mut self) -> PResult<'a, P<Expr>> {
+        let else_span = self.prev_token.span; // `else`
+        let attrs = self.parse_outer_attributes()?; // For recovery.
+        let expr = if self.eat_keyword(exp!(If)) {
+            ensure_sufficient_stack(|| self.parse_expr_if())?
+        } else if self.check(exp!(OpenBrace)) {
+            self.parse_simple_block()?
+        } else {
+            let snapshot = self.create_snapshot_for_diagnostic();
+            let first_tok = super::token_descr(&self.token);
+            let first_tok_span = self.token.span;
+            match self.parse_expr() {
+                Ok(cond)
+                // Try to guess the difference between a "condition-like" vs
+                // "statement-like" expression.
+                //
+                // We are seeing the following code, in which $cond is neither
+                // ExprKind::Block nor ExprKind::If (the 2 cases wherein this
+                // would be valid syntax).
+                //
+                //     if ... {
+                //     } else $cond
+                //
+                // If $cond is "condition-like" such as ExprKind::Binary, we
+                // want to suggest inserting `if`.
+                //
+                //     if ... {
+                //     } else if a == b {
+                //            ^^
+                //     }
+                //
+                // We account for macro calls that were meant as conditions as well.
+                //
+                //     if ... {
+                //     } else if macro! { foo bar } {
+                //            ^^
+                //     }
+                //
+                // If $cond is "statement-like" such as ExprKind::While then we
+                // want to suggest wrapping in braces.
+                //
+                //     if ... {
+                //     } else {
+                //            ^
+                //         while true {}
+                //     }
+                //     ^
+                    if self.check(exp!(OpenBrace))
+                        && (classify::expr_requires_semi_to_be_stmt(&cond)
+                            || matches!(cond.kind, ExprKind::MacCall(..)))
+                    =>
+                {
+                    self.dcx().emit_err(errors::ExpectedElseBlock {
+                        first_tok_span,
+                        first_tok,
+                        else_span,
+                        condition_start: cond.span.shrink_to_lo(),
+                    });
+                    self.parse_if_after_cond(cond.span.shrink_to_lo(), cond)?
+                }
+                Err(e) => {
+                    e.cancel();
+                    self.restore_snapshot(snapshot);
+                    self.parse_simple_block()?
+                },
+                Ok(_) => {
+                    self.restore_snapshot(snapshot);
+                    self.parse_simple_block()?
+                },
+            }
+        };
+        self.error_on_if_block_attrs(else_span, true, expr.span, attrs);
+        Ok(expr)
+    }
+
+    fn error_on_if_block_attrs(
+        &self,
+        ctx_span: Span,
+        is_ctx_else: bool,
+        branch_span: Span,
+        attrs: AttrWrapper,
+    ) {
+        if !attrs.is_empty()
+            && let [x0 @ xn] | [x0, .., xn] = &*attrs.take_for_recovery(self.psess)
+        {
+            let attributes = x0.span.until(branch_span);
+            let last = xn.span;
+            let ctx = if is_ctx_else { "else" } else { "if" };
+            self.dcx().emit_err(errors::OuterAttributeNotAllowedOnIfElse {
+                last,
+                branch_span,
+                ctx_span,
+                ctx: ctx.to_string(),
+                attributes,
+            });
+        }
+    }
+
+    fn error_on_extra_if(&mut self, cond: &P<Expr>) -> PResult<'a, ()> {
+        if let ExprKind::Binary(Spanned { span: binop_span, node: binop }, _, right) = &cond.kind
+            && let BinOpKind::And = binop
+            && let ExprKind::If(cond, ..) = &right.kind
+        {
+            Err(self.dcx().create_err(errors::UnexpectedIfWithIf(
+                binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()),
+            )))
+        } else {
+            Ok(())
+        }
+    }
+
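+    /// Parses the `$pat in $expr` head of a `for` loop, recovering from a C-style
+    /// parenthesized head (illustrative sketch of the inputs handled):
+    ///
+    /// ```ignore (illustrative)
+    /// for x in 0..10 { ... }     // happy path
+    /// for (x in 0..10) { ... }   // recovered; the parentheses are suggested for removal
+    /// ```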
+    fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
+        let begin_paren = if self.token == token::OpenParen {
+            // Record whether we are about to parse `for (`.
+            // This is used below for recovery in case of `for ( $stuff ) $block`
+            // in which case we will suggest `for $stuff $block`.
+            let start_span = self.token.span;
+            let left = self.prev_token.span.between(self.look_ahead(1, |t| t.span));
+            Some((start_span, left))
+        } else {
+            None
+        };
+        // Try to parse the pattern `for ($PAT) in $EXPR`.
+        let pat = match (
+            self.parse_pat_allow_top_guard(
+                None,
+                RecoverComma::Yes,
+                RecoverColon::Yes,
+                CommaRecoveryMode::LikelyTuple,
+            ),
+            begin_paren,
+        ) {
+            (Ok(pat), _) => pat, // Happy path.
+            (Err(err), Some((start_span, left))) if self.eat_keyword(exp!(In)) => {
+                // We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
+                // happen right before the return of this method.
+                let attrs = self.parse_outer_attributes()?;
+                let (expr, _) = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
+                    Ok(expr) => expr,
+                    Err(expr_err) => {
+                        // We don't know what followed the `in`, so cancel and bubble up the
+                        // original error.
+                        expr_err.cancel();
+                        return Err(err);
+                    }
+                };
+                return if self.token == token::CloseParen {
+                    // We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the
+                    // parser state and emit a targeted suggestion.
+                    let span = vec![start_span, self.token.span];
+                    let right = self.prev_token.span.between(self.look_ahead(1, |t| t.span));
+                    self.bump(); // )
+                    err.cancel();
+                    self.dcx().emit_err(errors::ParenthesesInForHead {
+                        span,
+                        // With e.g. `for (x) in y)` this would replace `(x) in y)`
+                        // with `x) in y)` which is syntactically invalid.
+                        // However, this is prevented before we get here.
+                        sugg: errors::ParenthesesInForHeadSugg { left, right },
+                    });
+                    Ok((self.mk_pat(start_span.to(right), ast::PatKind::Wild), expr))
+                } else {
+                    Err(err) // Some other error, bubble up.
+                };
+            }
+            (Err(err), _) => return Err(err), // Some other error, bubble up.
+        };
+        if !self.eat_keyword(exp!(In)) {
+            self.error_missing_in_for_loop();
+        }
+        self.check_for_for_in_in_typo(self.prev_token.span);
+        let attrs = self.parse_outer_attributes()?;
+        let (expr, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
+        Ok((pat, expr))
+    }
+
+    /// Parses `for await? <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
+    fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+        let is_await =
+            self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await));
+
+        if is_await {
+            self.psess.gated_spans.gate(sym::async_for_loop, self.prev_token.span);
+        }
+
+        let kind = if is_await { ForLoopKind::ForAwait } else { ForLoopKind::For };
+
+        let (pat, expr) = self.parse_for_head()?;
+        // Recover from missing expression in `for` loop
+        if matches!(expr.kind, ExprKind::Block(..))
+            && self.token.kind != token::OpenBrace
+            && self.may_recover()
+        {
+            let guar = self
+                .dcx()
+                .emit_err(errors::MissingExpressionInForLoop { span: expr.span.shrink_to_lo() });
+            let err_expr = self.mk_expr(expr.span, ExprKind::Err(guar));
+            let block = self.mk_block(thin_vec![], BlockCheckMode::Default, self.prev_token.span);
+            return Ok(self.mk_expr(
+                lo.to(self.prev_token.span),
+                ExprKind::ForLoop { pat, iter: err_expr, body: block, label: opt_label, kind },
+            ));
+        }
+
+        let (attrs, loop_block) = self.parse_inner_attrs_and_block(
+            // Only suggest moving an erroneous block label to the loop header
+            // if there is not already a label there.
+            opt_label.is_none().then_some(lo),
+        )?;
+
+        let kind = ExprKind::ForLoop { pat, iter: expr, body: loop_block, label: opt_label, kind };
+
+        self.recover_loop_else("for", lo)?;
+
+        Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
+    }
+
+    /// Recovers from an `else` clause after a loop (`for...else`, `while...else`).
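+    ///
+    /// For example (illustrative), `while cond { ... } else { ... }` parses the `else`
+    /// block and then reports `LoopElseNotSupported`.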
+    fn recover_loop_else(&mut self, loop_kind: &'static str, loop_kw: Span) -> PResult<'a, ()> {
+        if self.token.is_keyword(kw::Else) && self.may_recover() {
+            let else_span = self.token.span;
+            self.bump();
+            let else_clause = self.parse_expr_else()?;
+            self.dcx().emit_err(errors::LoopElseNotSupported {
+                span: else_span.to(else_clause.span),
+                loop_kind,
+                loop_kw,
+            });
+        }
+        Ok(())
+    }
+
+    fn error_missing_in_for_loop(&mut self) {
+        let (span, sub): (_, fn(_) -> _) = if self.token.is_ident_named(sym::of) {
+            // Possibly using JS syntax (#75311).
+            let span = self.token.span;
+            self.bump();
+            (span, errors::MissingInInForLoopSub::InNotOf)
+        } else {
+            (self.prev_token.span.between(self.token.span), errors::MissingInInForLoopSub::AddIn)
+        };
+
+        self.dcx().emit_err(errors::MissingInInForLoop { span, sub: sub(span) });
+    }
+
+    /// Parses a `while` or `while let` expression (`while` token already eaten).
+    fn parse_expr_while(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+        let policy = LetChainsPolicy::EditionDependent { current_edition: lo.edition() };
+        let cond = self.parse_expr_cond(policy).map_err(|mut err| {
+            err.span_label(lo, "while parsing the condition of this `while` expression");
+            err
+        })?;
+        let (attrs, body) = self
+            .parse_inner_attrs_and_block(
+                // Only suggest moving an erroneous block label to the loop header
+                // if there is not already a label there.
+                opt_label.is_none().then_some(lo),
+            )
+            .map_err(|mut err| {
+                err.span_label(lo, "while parsing the body of this `while` expression");
+                err.span_label(cond.span, "this `while` condition successfully parsed");
+                err
+            })?;
+
+        self.recover_loop_else("while", lo)?;
+
+        Ok(self.mk_expr_with_attrs(
+            lo.to(self.prev_token.span),
+            ExprKind::While(cond, body, opt_label),
+            attrs,
+        ))
+    }
+
+    /// Parses `loop { ... }` (`loop` token already eaten).
+    fn parse_expr_loop(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+        let loop_span = self.prev_token.span;
+        let (attrs, body) = self.parse_inner_attrs_and_block(
+            // Only suggest moving an erroneous block label to the loop header
+            // if there is not already a label there.
+            opt_label.is_none().then_some(lo),
+        )?;
+        self.recover_loop_else("loop", lo)?;
+        Ok(self.mk_expr_with_attrs(
+            lo.to(self.prev_token.span),
+            ExprKind::Loop(body, opt_label, loop_span),
+            attrs,
+        ))
+    }
+
+    pub(crate) fn eat_label(&mut self) -> Option<Label> {
+        if let Some((ident, is_raw)) = self.token.lifetime() {
+            // Disallow `'fn`, but with a better error message than `expect_lifetime`.
+            if matches!(is_raw, IdentIsRaw::No) && ident.without_first_quote().is_reserved() {
+                self.dcx().emit_err(errors::InvalidLabel { span: ident.span, name: ident.name });
+            }
+
+            self.bump();
+            Some(Label { ident })
+        } else {
+            None
+        }
+    }
+
+    /// Parses a `match ... { ... }` expression (`match` token already eaten).
+    fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
+        let match_span = self.prev_token.span;
+        let attrs = self.parse_outer_attributes()?;
+        let (scrutinee, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
+
+        self.parse_match_block(match_span, match_span, scrutinee, MatchKind::Prefix)
+    }
+
+    /// Parses the block of a `match expr { ... }` or an `expr.match { ... }`
+    /// expression. This is called after the `match` token and the scrutinee have been eaten.
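+    ///
+    /// Illustrative forms handled here:
+    ///
+    /// ```ignore (illustrative)
+    /// match scrutinee { /* arms */ }    // prefix form
+    /// scrutinee.match { /* arms */ }    // postfix form
+    /// ```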
+    fn parse_match_block(
+        &mut self,
+        lo: Span,
+        match_span: Span,
+        scrutinee: P<Expr>,
+        match_kind: MatchKind,
+    ) -> PResult<'a, P<Expr>> {
+        if let Err(mut e) = self.expect(exp!(OpenBrace)) {
+            if self.token == token::Semi {
+                e.span_suggestion_short(
+                    match_span,
+                    "try removing this `match`",
+                    "",
+                    Applicability::MaybeIncorrect, // speculative
+                );
+            }
+            if self.maybe_recover_unexpected_block_label(None) {
+                e.cancel();
+                self.bump();
+            } else {
+                return Err(e);
+            }
+        }
+        let attrs = self.parse_inner_attributes()?;
+
+        let mut arms = ThinVec::new();
+        while self.token != token::CloseBrace {
+            match self.parse_arm() {
+                Ok(arm) => arms.push(arm),
+                Err(e) => {
+                    // Recover by skipping to the end of the block.
+                    let guar = e.emit();
+                    self.recover_stmt();
+                    let span = lo.to(self.token.span);
+                    if self.token == token::CloseBrace {
+                        self.bump();
+                    }
+                    // Always push at least one arm to make the match non-empty
+                    arms.push(Arm {
+                        attrs: Default::default(),
+                        pat: self.mk_pat(span, ast::PatKind::Err(guar)),
+                        guard: None,
+                        body: Some(self.mk_expr_err(span, guar)),
+                        span,
+                        id: DUMMY_NODE_ID,
+                        is_placeholder: false,
+                    });
+                    return Ok(self.mk_expr_with_attrs(
+                        span,
+                        ExprKind::Match(scrutinee, arms, match_kind),
+                        attrs,
+                    ));
+                }
+            }
+        }
+        let hi = self.token.span;
+        self.bump();
+        Ok(self.mk_expr_with_attrs(lo.to(hi), ExprKind::Match(scrutinee, arms, match_kind), attrs))
+    }
+
+    /// Attempts to recover from a match arm body with statements and no surrounding braces.
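+    ///
+    /// Illustrative shape of the input being recovered:
+    ///
+    /// ```ignore (illustrative)
+    /// match x {
+    ///     A => foo();
+    ///          bar(),   // the two statements were missing `{ ... }` around them
+    ///     B => baz(),
+    /// }
+    /// ```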
+    fn parse_arm_body_missing_braces(
+        &mut self,
+        first_expr: &P<Expr>,
+        arrow_span: Span,
+    ) -> Option<(Span, ErrorGuaranteed)> {
+        if self.token != token::Semi {
+            return None;
+        }
+        let start_snapshot = self.create_snapshot_for_diagnostic();
+        let semi_sp = self.token.span;
+        self.bump(); // `;`
+        let mut stmts =
+            vec![self.mk_stmt(first_expr.span, ast::StmtKind::Expr(first_expr.clone()))];
+        let err = |this: &Parser<'_>, stmts: Vec<ast::Stmt>| {
+            let span = stmts[0].span.to(stmts[stmts.len() - 1].span);
+
+            let guar = this.dcx().emit_err(errors::MatchArmBodyWithoutBraces {
+                statements: span,
+                arrow: arrow_span,
+                num_statements: stmts.len(),
+                sub: if stmts.len() > 1 {
+                    errors::MatchArmBodyWithoutBracesSugg::AddBraces {
+                        left: span.shrink_to_lo(),
+                        right: span.shrink_to_hi(),
+                    }
+                } else {
+                    errors::MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
+                },
+            });
+            (span, guar)
+        };
+        // We might have either a `,` -> `;` typo, or a block without braces. We need
+        // a more subtle parsing strategy.
+        loop {
+            if self.token == token::CloseBrace {
+                // We have reached the closing brace of the `match` expression.
+                return Some(err(self, stmts));
+            }
+            if self.token == token::Comma {
+                self.restore_snapshot(start_snapshot);
+                return None;
+            }
+            let pre_pat_snapshot = self.create_snapshot_for_diagnostic();
+            match self.parse_pat_no_top_alt(None, None) {
+                Ok(_pat) => {
+                    if self.token == token::FatArrow {
+                        // Reached arm end.
+                        self.restore_snapshot(pre_pat_snapshot);
+                        return Some(err(self, stmts));
+                    }
+                }
+                Err(err) => {
+                    err.cancel();
+                }
+            }
+
+            self.restore_snapshot(pre_pat_snapshot);
+            match self.parse_stmt_without_recovery(true, ForceCollect::No, false) {
+                // Consume statements for as long as possible.
+                Ok(Some(stmt)) => {
+                    stmts.push(stmt);
+                }
+                Ok(None) => {
+                    self.restore_snapshot(start_snapshot);
+                    break;
+                }
+                // We couldn't parse yet another statement missing its enclosing
+                // block, nor the next arm's pattern or closing brace.
+                Err(stmt_err) => {
+                    stmt_err.cancel();
+                    self.restore_snapshot(start_snapshot);
+                    break;
+                }
+            }
+        }
+        None
+    }
+
+    pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
+        let attrs = self.parse_outer_attributes()?;
+        self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+            let lo = this.token.span;
+            let (pat, guard) = this.parse_match_arm_pat_and_guard()?;
+
+            let span_before_body = this.prev_token.span;
+            let arm_body;
+            let is_fat_arrow = this.check(exp!(FatArrow));
+            let is_almost_fat_arrow =
+                TokenKind::FatArrow.similar_tokens().contains(&this.token.kind);
+
+            // This avoids the compiler saying that a `,` or `}` was expected even though
+            // the pattern isn't a never pattern (and thus an arm body is required).
+            let armless = (!is_fat_arrow && !is_almost_fat_arrow && pat.could_be_never_pattern())
+                || matches!(this.token.kind, token::Comma | token::CloseBrace);
+
+            let mut result = if armless {
+                // A pattern without a body, allowed for never patterns.
+                arm_body = None;
+                let span = lo.to(this.prev_token.span);
+                this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map(|x| {
+                    // Don't gate twice
+                    if !pat.contains_never_pattern() {
+                        this.psess.gated_spans.gate(sym::never_patterns, span);
+                    }
+                    x
+                })
+            } else {
+                if let Err(mut err) = this.expect(exp!(FatArrow)) {
+                    // We might have a `=>` -> `=` or `->` typo (issue #89396).
+                    if is_almost_fat_arrow {
+                        err.span_suggestion(
+                            this.token.span,
+                            "use a fat arrow to start a match arm",
+                            "=>",
+                            Applicability::MachineApplicable,
+                        );
+                        if matches!(
+                            (&this.prev_token.kind, &this.token.kind),
+                            (token::DotDotEq, token::Gt)
+                        ) {
+                            // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
+                            // so we suppress the error here
+                            err.delay_as_bug();
+                        } else {
+                            err.emit();
+                        }
+                        this.bump();
+                    } else {
+                        return Err(err);
+                    }
+                }
+                let arrow_span = this.prev_token.span;
+                let arm_start_span = this.token.span;
+
+                let attrs = this.parse_outer_attributes()?;
+                let (expr, _) =
+                    this.parse_expr_res(Restrictions::STMT_EXPR, attrs).map_err(|mut err| {
+                        err.span_label(arrow_span, "while parsing the `match` arm starting here");
+                        err
+                    })?;
+
+                let require_comma =
+                    !classify::expr_is_complete(&expr) && this.token != token::CloseBrace;
+
+                if !require_comma {
+                    arm_body = Some(expr);
+                    // Eat a comma if it exists, though.
+                    let _ = this.eat(exp!(Comma));
+                    Ok(Recovered::No)
+                } else if let Some((span, guar)) =
+                    this.parse_arm_body_missing_braces(&expr, arrow_span)
+                {
+                    let body = this.mk_expr_err(span, guar);
+                    arm_body = Some(body);
+                    Ok(Recovered::Yes(guar))
+                } else {
+                    let expr_span = expr.span;
+                    arm_body = Some(expr);
+                    this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map_err(|mut err| {
+                        if this.token == token::FatArrow {
+                            let sm = this.psess.source_map();
+                            if let Ok(expr_lines) = sm.span_to_lines(expr_span)
+                                && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
+                                && expr_lines.lines.len() == 2
+                            {
+                                if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col {
+                                    // We check whether there's any trailing code in the parse span,
+                                    // if there isn't, we very likely have the following:
+                                    //
+                                    // X |     &Y => "y"
+                                    //   |        --    - missing comma
+                                    //   |        |
+                                    //   |        arrow_span
+                                    // X |     &X => "x"
+                                    //   |      - ^^ self.token.span
+                                    //   |      |
+                                    //   |      parsed until here as `"y" & X`
+                                    err.span_suggestion_short(
+                                        arm_start_span.shrink_to_hi(),
+                                        "missing a comma here to end this `match` arm",
+                                        ",",
+                                        Applicability::MachineApplicable,
+                                    );
+                                } else if arm_start_lines.lines[0].end_col + rustc_span::CharPos(1)
+                                    == expr_lines.lines[0].end_col
+                                {
+                                    // Similar to the above, but the user may have typed a `.` or `/`
+                                    // at the end of the line instead of a `,`.
+                                    let comma_span = arm_start_span
+                                        .shrink_to_hi()
+                                        .with_hi(arm_start_span.hi() + rustc_span::BytePos(1));
+                                    if let Ok(res) = sm.span_to_snippet(comma_span)
+                                        && (res == "." || res == "/")
+                                    {
+                                        err.span_suggestion_short(
+                                            comma_span,
+                                            "you might have meant to write a `,` to end this `match` arm",
+                                            ",",
+                                            Applicability::MachineApplicable,
+                                        );
+                                    }
+                                }
+                            }
+                        } else {
+                            err.span_label(
+                                arrow_span,
+                                "while parsing the `match` arm starting here",
+                            );
+                        }
+                        err
+                    })
+                }
+            };
+
+            let hi_span = arm_body.as_ref().map_or(span_before_body, |body| body.span);
+            let arm_span = lo.to(hi_span);
+
+            // We want to recover:
+            // X |     Some(_) => foo()
+            //   |                     - missing comma
+            // X |     None => "x"
+            //   |     ^^^^ self.token.span
+            // as well as:
+            // X |     Some(!)
+            //   |            - missing comma
+            // X |     None => "x"
+            //   |     ^^^^ self.token.span
+            // But we mustn't recover
+            // X |     pat[0] => {}
+            //   |        ^ self.token.span
+            let recover_missing_comma = arm_body.is_some() || pat.could_be_never_pattern();
+            if recover_missing_comma {
+                result = result.or_else(|err| {
+                    // FIXME(compiler-errors): We could also recover `; PAT =>` here
+
+                    // Try to parse a following `PAT =>`, if successful
+                    // then we should recover.
+                    let mut snapshot = this.create_snapshot_for_diagnostic();
+                    let pattern_follows = snapshot
+                        .parse_pat_no_top_guard(
+                            None,
+                            RecoverComma::Yes,
+                            RecoverColon::Yes,
+                            CommaRecoveryMode::EitherTupleOrPipe,
+                        )
+                        .map_err(|err| err.cancel())
+                        .is_ok();
+                    if pattern_follows && snapshot.check(exp!(FatArrow)) {
+                        err.cancel();
+                        let guar = this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
+                            span: arm_span.shrink_to_hi(),
+                        });
+                        return Ok(Recovered::Yes(guar));
+                    }
+                    Err(err)
+                });
+            }
+            result?;
+
+            Ok((
+                ast::Arm {
+                    attrs,
+                    pat,
+                    guard,
+                    body: arm_body,
+                    span: arm_span,
+                    id: DUMMY_NODE_ID,
+                    is_placeholder: false,
+                },
+                Trailing::No,
+                UsePreAttrPos::No,
+            ))
+        })
+    }
+
+    fn parse_match_arm_guard(&mut self) -> PResult<'a, Option<P<Expr>>> {
+        // Used to decide whether the `if_let_guard` feature gate applies, by scanning
+        // for `let` expressions chained with `&&`.
+        fn has_let_expr(expr: &Expr) -> bool {
+            match &expr.kind {
+                ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
+                    let lhs_rslt = has_let_expr(lhs);
+                    let rhs_rslt = has_let_expr(rhs);
+                    lhs_rslt || rhs_rslt
+                }
+                ExprKind::Let(..) => true,
+                _ => false,
+            }
+        }
+        if !self.eat_keyword(exp!(If)) {
+            // No match arm guard present.
+            return Ok(None);
+        }
+
+        let if_span = self.prev_token.span;
+        let mut cond = self.parse_match_guard_condition()?;
+
+        CondChecker::new(self, LetChainsPolicy::AlwaysAllowed).visit_expr(&mut cond);
+
+        if has_let_expr(&cond) {
+            let span = if_span.to(cond.span);
+            self.psess.gated_spans.gate(sym::if_let_guard, span);
+        }
+        Ok(Some(cond))
+    }
+
+    fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Expr>>)> {
+        if self.token == token::OpenParen {
+            let left = self.token.span;
+            let pat = self.parse_pat_no_top_guard(
+                None,
+                RecoverComma::Yes,
+                RecoverColon::Yes,
+                CommaRecoveryMode::EitherTupleOrPipe,
+            )?;
+            if let ast::PatKind::Paren(subpat) = &pat.kind
+                && let ast::PatKind::Guard(..) = &subpat.kind
+            {
+                // Detect and recover from `($pat if $cond) => $arm`.
+                // FIXME(guard_patterns): convert this to a normal guard instead
+                let span = pat.span;
+                let ast::PatKind::Paren(subpat) = pat.kind else { unreachable!() };
+                let ast::PatKind::Guard(_, mut cond) = subpat.kind else { unreachable!() };
+                self.psess.gated_spans.ungate_last(sym::guard_patterns, cond.span);
+                CondChecker::new(self, LetChainsPolicy::AlwaysAllowed).visit_expr(&mut cond);
+                let right = self.prev_token.span;
+                self.dcx().emit_err(errors::ParenthesesInMatchPat {
+                    span: vec![left, right],
+                    sugg: errors::ParenthesesInMatchPatSugg { left, right },
+                });
+                Ok((self.mk_pat(span, ast::PatKind::Wild), Some(cond)))
+            } else {
+                Ok((pat, self.parse_match_arm_guard()?))
+            }
+        } else {
+            // Regular parser flow:
+            let pat = self.parse_pat_no_top_guard(
+                None,
+                RecoverComma::Yes,
+                RecoverColon::Yes,
+                CommaRecoveryMode::EitherTupleOrPipe,
+            )?;
+            Ok((pat, self.parse_match_arm_guard()?))
+        }
+    }
+
+    fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
+        let attrs = self.parse_outer_attributes()?;
+        match self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs) {
+            Ok((expr, _)) => Ok(expr),
+            Err(mut err) => {
+                if self.prev_token == token::OpenBrace {
+                    let sugg_sp = self.prev_token.span.shrink_to_lo();
+                    // Consume everything within the braces to avoid further parse
+                    // errors.
+                    self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
+                    let msg = "you might have meant to start a match arm after the match guard";
+                    if self.eat(exp!(CloseBrace)) {
+                        let applicability = if self.token != token::FatArrow {
+                            // We have high confidence that we indeed didn't have a struct
+                            // literal in the match guard, but rather we had some operation
+                            // that ended in a path, immediately followed by a block that was
+                            // meant to be the match arm.
+                            Applicability::MachineApplicable
+                        } else {
+                            Applicability::MaybeIncorrect
+                        };
+                        err.span_suggestion_verbose(sugg_sp, msg, "=> ", applicability);
+                    }
+                }
+                Err(err)
+            }
+        }
+    }
+
+    pub(crate) fn is_builtin(&self) -> bool {
+        self.token.is_keyword(kw::Builtin) && self.look_ahead(1, |t| *t == token::Pound)
+    }
+
+    /// Parses a `try {...}` expression (`try` token already eaten).
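+    ///
+    /// Also detects the unsupported `try { ... } catch { ... }` form (illustrative) and
+    /// reports `CatchAfterTry` instead of accepting it.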
+    fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
+        let (attrs, body) = self.parse_inner_attrs_and_block(None)?;
+        if self.eat_keyword(exp!(Catch)) {
+            Err(self.dcx().create_err(errors::CatchAfterTry { span: self.prev_token.span }))
+        } else {
+            let span = span_lo.to(body.span);
+            self.psess.gated_spans.gate(sym::try_blocks, span);
+            Ok(self.mk_expr_with_attrs(span, ExprKind::TryBlock(body), attrs))
+        }
+    }
+
+    fn is_do_catch_block(&self) -> bool {
+        self.token.is_keyword(kw::Do)
+            && self.is_keyword_ahead(1, &[kw::Catch])
+            && self.look_ahead(2, |t| *t == token::OpenBrace || t.is_metavar_block())
+            && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
+    }
+
+    fn is_do_yeet(&self) -> bool {
+        self.token.is_keyword(kw::Do) && self.is_keyword_ahead(1, &[kw::Yeet])
+    }
+
+    fn is_try_block(&self) -> bool {
+        self.token.is_keyword(kw::Try)
+            && self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
+            && self.token_uninterpolated_span().at_least_rust_2018()
+    }
+
+    /// Parses an `async move? {...}` or `gen move? {...}` expression.
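+    ///
+    /// Illustrative forms and the `GenBlockKind` they map to:
+    ///
+    /// ```ignore (illustrative)
+    /// async { ... }       // GenBlockKind::Async (stable)
+    /// gen { ... }         // GenBlockKind::Gen (gated on `gen_blocks`)
+    /// async gen { ... }   // GenBlockKind::AsyncGen (gated on `gen_blocks`)
+    /// ```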
+    fn parse_gen_block(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.token.span;
+        let kind = if self.eat_keyword(exp!(Async)) {
+            if self.eat_keyword(exp!(Gen)) { GenBlockKind::AsyncGen } else { GenBlockKind::Async }
+        } else {
+            assert!(self.eat_keyword(exp!(Gen)));
+            GenBlockKind::Gen
+        };
+        match kind {
+            GenBlockKind::Async => {
+                // `async` blocks are stable
+            }
+            GenBlockKind::Gen | GenBlockKind::AsyncGen => {
+                self.psess.gated_spans.gate(sym::gen_blocks, lo.to(self.prev_token.span));
+            }
+        }
+        let capture_clause = self.parse_capture_clause()?;
+        let decl_span = lo.to(self.prev_token.span);
+        let (attrs, body) = self.parse_inner_attrs_and_block(None)?;
+        let kind = ExprKind::Gen(capture_clause, body, kind, decl_span);
+        Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
+    }
+
+    fn is_gen_block(&self, kw: Symbol, lookahead: usize) -> bool {
+        self.is_keyword_ahead(lookahead, &[kw])
+            && ((
+                // `async move {` or `async use {`
+                self.is_keyword_ahead(lookahead + 1, &[kw::Move, kw::Use])
+                    && self.look_ahead(lookahead + 2, |t| {
+                        *t == token::OpenBrace || t.is_metavar_block()
+                    })
+            ) || (
+                // `async {`
+                self.look_ahead(lookahead + 1, |t| *t == token::OpenBrace || t.is_metavar_block())
+            ))
+    }
+
+    pub(super) fn is_async_gen_block(&self) -> bool {
+        self.token.is_keyword(kw::Async) && self.is_gen_block(kw::Gen, 1)
+    }
+
+    fn is_certainly_not_a_block(&self) -> bool {
+        // `{ ident, ` and `{ ident: ` cannot start a block.
+        self.look_ahead(1, |t| t.is_ident())
+            && self.look_ahead(2, |t| t == &token::Comma || t == &token::Colon)
+    }
+
+    fn maybe_parse_struct_expr(
+        &mut self,
+        qself: &Option<P<ast::QSelf>>,
+        path: &ast::Path,
+    ) -> Option<PResult<'a, P<Expr>>> {
+        let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
+        if struct_allowed || self.is_certainly_not_a_block() {
+            if let Err(err) = self.expect(exp!(OpenBrace)) {
+                return Some(Err(err));
+            }
+            let expr = self.parse_expr_struct(qself.clone(), path.clone(), true);
+            if let (Ok(expr), false) = (&expr, struct_allowed) {
+                // This is a struct literal, but we can't accept it here.
+                self.dcx().emit_err(errors::StructLiteralNotAllowedHere {
+                    span: expr.span,
+                    sub: errors::StructLiteralNotAllowedHereSugg {
+                        left: path.span.shrink_to_lo(),
+                        right: expr.span.shrink_to_hi(),
+                    },
+                });
+            }
+            return Some(expr);
+        }
+        None
+    }
+
+    pub(super) fn parse_struct_fields(
+        &mut self,
+        pth: ast::Path,
+        recover: bool,
+        close: ExpTokenPair<'_>,
+    ) -> PResult<
+        'a,
+        (
+            ThinVec<ExprField>,
+            ast::StructRest,
+            Option<ErrorGuaranteed>, /* async blocks are forbidden in Rust 2015 */
+        ),
+    > {
+        let mut fields = ThinVec::new();
+        let mut base = ast::StructRest::None;
+        let mut recovered_async = None;
+        let in_if_guard = self.restrictions.contains(Restrictions::IN_IF_GUARD);
+
+        let async_block_err = |e: &mut Diag<'_>, span: Span| {
+            errors::AsyncBlockIn2015 { span }.add_to_diag(e);
+            errors::HelpUseLatestEdition::new().add_to_diag(e);
+        };
+
+        while self.token != *close.tok {
+            if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(close.tok) {
+                let exp_span = self.prev_token.span;
+                // We permit `.. }` on the left-hand side of a destructuring assignment.
+                if self.check(close) {
+                    base = ast::StructRest::Rest(self.prev_token.span);
+                    break;
+                }
+                match self.parse_expr() {
+                    Ok(e) => base = ast::StructRest::Base(e),
+                    Err(e) if recover => {
+                        e.emit();
+                        self.recover_stmt();
+                    }
+                    Err(e) => return Err(e),
+                }
+                self.recover_struct_comma_after_dotdot(exp_span);
+                break;
+            }
+
+            // Peek the field's ident before parsing its expr in order to emit better diagnostics.
+            let peek = self
+                .token
+                .ident()
+                .filter(|(ident, is_raw)| {
+                    (!ident.is_reserved() || matches!(is_raw, IdentIsRaw::Yes))
+                        && self.look_ahead(1, |tok| *tok == token::Colon)
+                })
+                .map(|(ident, _)| ident);
+
+            // We still want a field even if its expr didn't parse.
+            let field_ident = |this: &Self, guar: ErrorGuaranteed| {
+                peek.map(|ident| {
+                    let span = ident.span;
+                    ExprField {
+                        ident,
+                        span,
+                        expr: this.mk_expr_err(span, guar),
+                        is_shorthand: false,
+                        attrs: AttrVec::new(),
+                        id: DUMMY_NODE_ID,
+                        is_placeholder: false,
+                    }
+                })
+            };
+
+            let parsed_field = match self.parse_expr_field() {
+                Ok(f) => Ok(f),
+                Err(mut e) => {
+                    if pth == kw::Async {
+                        async_block_err(&mut e, pth.span);
+                    } else {
+                        e.span_label(pth.span, "while parsing this struct");
+                    }
+
+                    if let Some((ident, _)) = self.token.ident()
+                        && !self.token.is_reserved_ident()
+                        && self.look_ahead(1, |t| {
+                            AssocOp::from_token(t).is_some()
+                                || matches!(
+                                    t.kind,
+                                    token::OpenParen | token::OpenBracket | token::OpenBrace
+                                )
+                                || *t == token::Dot
+                        })
+                    {
+                        // Looks like they tried to write a complex expression as a shorthand
+                        // field, e.g. `n + m`, `f(a)`, `a[i]`, `S { x: 3 }`, or `x.y`.
+                        e.span_suggestion_verbose(
+                            self.token.span.shrink_to_lo(),
+                            "try naming a field",
+                            &format!("{ident}: ",),
+                            Applicability::MaybeIncorrect,
+                        );
+                    }
+                    if in_if_guard && close.token_type == TokenType::CloseBrace {
+                        return Err(e);
+                    }
+
+                    if !recover {
+                        return Err(e);
+                    }
+
+                    let guar = e.emit();
+                    if pth == kw::Async {
+                        recovered_async = Some(guar);
+                    }
+
+                    // If the next token is a comma, then try to parse
+                    // what comes next as additional fields, rather than
+                    // bailing out until next `}`.
+                    if self.token != token::Comma {
+                        self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
+                        if self.token != token::Comma {
+                            break;
+                        }
+                    }
+
+                    Err(guar)
+                }
+            };
+
+            let is_shorthand = parsed_field.as_ref().is_ok_and(|f| f.is_shorthand);
+            // A shorthand field can be turned into a full field with `:`.
+            // We should point this out.
+            self.check_or_expected(!is_shorthand, TokenType::Colon);
+
+            match self.expect_one_of(&[exp!(Comma)], &[close]) {
+                Ok(_) => {
+                    if let Ok(f) = parsed_field.or_else(|guar| field_ident(self, guar).ok_or(guar))
+                    {
+                        // Only include the field if there's no parse error for the field name.
+                        fields.push(f);
+                    }
+                }
+                Err(mut e) => {
+                    if pth == kw::Async {
+                        async_block_err(&mut e, pth.span);
+                    } else {
+                        e.span_label(pth.span, "while parsing this struct");
+                        if peek.is_some() {
+                            e.span_suggestion(
+                                self.prev_token.span.shrink_to_hi(),
+                                "try adding a comma",
+                                ",",
+                                Applicability::MachineApplicable,
+                            );
+                        }
+                    }
+                    if !recover {
+                        return Err(e);
+                    }
+                    let guar = e.emit();
+                    if pth == kw::Async {
+                        recovered_async = Some(guar);
+                    } else if let Some(f) = field_ident(self, guar) {
+                        fields.push(f);
+                    }
+                    self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
+                    let _ = self.eat(exp!(Comma));
+                }
+            }
+        }
+        Ok((fields, base, recovered_async))
+    }
+
+    /// Precondition: already parsed the '{'.
+    pub(super) fn parse_expr_struct(
+        &mut self,
+        qself: Option<P<ast::QSelf>>,
+        pth: ast::Path,
+        recover: bool,
+    ) -> PResult<'a, P<Expr>> {
+        let lo = pth.span;
+        let (fields, base, recovered_async) =
+            self.parse_struct_fields(pth.clone(), recover, exp!(CloseBrace))?;
+        let span = lo.to(self.token.span);
+        self.expect(exp!(CloseBrace))?;
+        let expr = if let Some(guar) = recovered_async {
+            ExprKind::Err(guar)
+        } else {
+            ExprKind::Struct(P(ast::StructExpr { qself, path: pth, fields, rest: base }))
+        };
+        Ok(self.mk_expr(span, expr))
+    }
+
+    fn recover_struct_comma_after_dotdot(&mut self, span: Span) {
+        if self.token != token::Comma {
+            return;
+        }
+        self.dcx().emit_err(errors::CommaAfterBaseStruct {
+            span: span.to(self.prev_token.span),
+            comma: self.token.span,
+        });
+        self.recover_stmt();
+    }
+
+    fn recover_struct_field_dots(&mut self, close: &TokenKind) -> bool {
+        if !self.look_ahead(1, |t| t == close) && self.eat(exp!(DotDotDot)) {
+            // Recover from a typo of `...`; suggest `..`.
+            let span = self.prev_token.span;
+            self.dcx().emit_err(errors::MissingDotDot { token_span: span, sugg_span: span });
+            return true;
+        }
+        false
+    }
+
+    /// Converts an ident into 'label and emits an "expected a label, found an identifier" error.
+    fn recover_ident_into_label(&mut self, ident: Ident) -> Label {
+        // Convert `label` -> `'label`,
+        // so that name resolution doesn't complain about a non-existent label.
+        let label = format!("'{}", ident.name);
+        let ident = Ident::new(Symbol::intern(&label), ident.span);
+
+        self.dcx().emit_err(errors::ExpectedLabelFoundIdent {
+            span: ident.span,
+            start: ident.span.shrink_to_lo(),
+        });
+
+        Label { ident }
+    }
+
+    /// Parses `ident (COLON expr)?`.
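+    ///
+    /// For example (illustrative), in `S { x: 1 + 1, y }` the first field is parsed in
+    /// full and `y` is shorthand for `y: y`.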
+    fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
+        let attrs = self.parse_outer_attributes()?;
+        self.recover_vcs_conflict_marker();
+        self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+            let lo = this.token.span;
+
+            // If there is no `:` (or `=`) one token ahead, this is a shorthand field.
+            let is_shorthand = !this.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
+            // Proactively check whether parsing the field will be incorrect.
+            let is_wrong = this.token.is_non_reserved_ident()
+                && !this.look_ahead(1, |t| {
+                    t == &token::Colon
+                        || t == &token::Eq
+                        || t == &token::Comma
+                        || t == &token::CloseBrace
+                        || t == &token::CloseParen
+                });
+            if is_wrong {
+                return Err(this.dcx().create_err(errors::ExpectedStructField {
+                    span: this.look_ahead(1, |t| t.span),
+                    ident_span: this.token.span,
+                    token: this.look_ahead(1, |t| *t),
+                }));
+            }
+            let (ident, expr) = if is_shorthand {
+                // Mimic `x: x` for the `x` field shorthand.
+                let ident = this.parse_ident_common(false)?;
+                let path = ast::Path::from_ident(ident);
+                (ident, this.mk_expr(ident.span, ExprKind::Path(None, path)))
+            } else {
+                let ident = this.parse_field_name()?;
+                this.error_on_eq_field_init(ident);
+                this.bump(); // `:`
+                (ident, this.parse_expr()?)
+            };
+
+            Ok((
+                ast::ExprField {
+                    ident,
+                    span: lo.to(expr.span),
+                    expr,
+                    is_shorthand,
+                    attrs,
+                    id: DUMMY_NODE_ID,
+                    is_placeholder: false,
+                },
+                Trailing::from(this.token == token::Comma),
+                UsePreAttrPos::No,
+            ))
+        })
+    }
+
+    /// Checks for `=`, which means the source incorrectly attempts to
+    /// initialize a field with `=` rather than `:`.
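+    ///
+    /// An illustrative erroneous input that triggers this diagnostic:
+    ///
+    /// ```text
+    /// let s = S { x = 1 };
+    /// //            ^ should be `x: 1`
+    /// ```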
+    fn error_on_eq_field_init(&self, field_name: Ident) {
+        if self.token != token::Eq {
+            return;
+        }
+
+        self.dcx().emit_err(errors::EqFieldInit {
+            span: self.token.span,
+            eq: field_name.span.shrink_to_hi().to(self.token.span),
+        });
+    }
+
+    fn err_dotdotdot_syntax(&self, span: Span) {
+        self.dcx().emit_err(errors::DotDotDot { span });
+    }
+
+    fn err_larrow_operator(&self, span: Span) {
+        self.dcx().emit_err(errors::LeftArrowOperator { span });
+    }
+
+    fn mk_assign_op(&self, assign_op: AssignOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
+        ExprKind::AssignOp(assign_op, lhs, rhs)
+    }
+
+    fn mk_range(
+        &mut self,
+        start: Option<P<Expr>>,
+        end: Option<P<Expr>>,
+        limits: RangeLimits,
+    ) -> ExprKind {
+        if end.is_none() && limits == RangeLimits::Closed {
+            let guar = self.inclusive_range_with_incorrect_end();
+            ExprKind::Err(guar)
+        } else {
+            ExprKind::Range(start, end, limits)
+        }
+    }
+
+    fn mk_unary(&self, unop: UnOp, expr: P<Expr>) -> ExprKind {
+        ExprKind::Unary(unop, expr)
+    }
+
+    fn mk_binary(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
+        ExprKind::Binary(binop, lhs, rhs)
+    }
+
+    fn mk_index(&self, expr: P<Expr>, idx: P<Expr>, brackets_span: Span) -> ExprKind {
+        ExprKind::Index(expr, idx, brackets_span)
+    }
+
+    fn mk_call(&self, f: P<Expr>, args: ThinVec<P<Expr>>) -> ExprKind {
+        ExprKind::Call(f, args)
+    }
+
+    fn mk_await_expr(&mut self, self_arg: P<Expr>, lo: Span) -> P<Expr> {
+        let span = lo.to(self.prev_token.span);
+        let await_expr = self.mk_expr(span, ExprKind::Await(self_arg, self.prev_token.span));
+        self.recover_from_await_method_call();
+        await_expr
+    }
+
+    fn mk_use_expr(&mut self, self_arg: P<Expr>, lo: Span) -> P<Expr> {
+        let span = lo.to(self.prev_token.span);
+        let use_expr = self.mk_expr(span, ExprKind::Use(self_arg, self.prev_token.span));
+        self.recover_from_use();
+        use_expr
+    }
+
+    pub(crate) fn mk_expr_with_attrs(&self, span: Span, kind: ExprKind, attrs: AttrVec) -> P<Expr> {
+        P(Expr { kind, span, attrs, id: DUMMY_NODE_ID, tokens: None })
+    }
+
+    pub(crate) fn mk_expr(&self, span: Span, kind: ExprKind) -> P<Expr> {
+        self.mk_expr_with_attrs(span, kind, AttrVec::new())
+    }
+
+    pub(super) fn mk_expr_err(&self, span: Span, guar: ErrorGuaranteed) -> P<Expr> {
+        self.mk_expr(span, ExprKind::Err(guar))
+    }
+
+    /// Creates the span for a binary expression, ensuring the span of the parent node
+    /// covers the spans of `lhs` and `rhs`, including any outer attributes on `lhs`.
+    fn mk_expr_sp(&self, lhs: &P<Expr>, lhs_span: Span, op_span: Span, rhs_span: Span) -> Span {
+        lhs.attrs
+            .iter()
+            .find(|a| a.style == AttrStyle::Outer)
+            .map_or(lhs_span, |a| a.span)
+            .to(op_span)
+            .to(rhs_span)
+    }
+
+    fn collect_tokens_for_expr(
+        &mut self,
+        attrs: AttrWrapper,
+        f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P<Expr>>,
+    ) -> PResult<'a, P<Expr>> {
+        self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+            let res = f(this, attrs)?;
+            let trailing = Trailing::from(
+                this.restrictions.contains(Restrictions::STMT_EXPR)
+                     && this.token == token::Semi
+                // FIXME: pass an additional condition through from the place
+                // where we know we need a comma, rather than assuming that
+                // `#[attr] expr,` always captures a trailing comma.
+                || this.token == token::Comma,
+            );
+            Ok((res, trailing, UsePreAttrPos::No))
+        })
+    }
+}
+
+/// Could this lifetime/label be an unclosed char literal? For example, `'a`
+/// could be, but `'abc` could not.
+pub(crate) fn could_be_unclosed_char_literal(ident: Ident) -> bool {
+    ident.name.as_str().starts_with('\'')
+        && unescape_char(ident.without_first_quote().name.as_str()).is_ok()
+}
+
+/// Used to forbid `let` expressions in certain syntactic locations.
+#[derive(Clone, Copy, Subdiagnostic)]
+pub(crate) enum ForbiddenLetReason {
+    /// `let` is not valid here, and the exact syntactic context is not important
+    OtherForbidden,
+    /// A let chain with the `||` operator
+    #[note(parse_not_supported_or)]
+    NotSupportedOr(#[primary_span] Span),
+    /// A let chain with invalid parentheses
+    ///
+    /// For example, `let 1 = 1 && (expr && expr)` is allowed
+    /// but `(let 1 = 1 && (let 1 = 1 && (let 1 = 1))) && let a = 1` is not
+    #[note(parse_not_supported_parentheses)]
+    NotSupportedParentheses(#[primary_span] Span),
+}
+
+/// Whether let chains are allowed on all editions, or only on edition 2024 and later.
+/// In the edition-dependent case, the currently active edition is recorded.
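+///
+/// For illustration, a "let chain" is a condition such as:
+///
+/// ```rust,ignore (example)
+/// if let Some(x) = a && x > 0 { /* ... */ }
+/// ```
+///
+/// Under `EditionDependent`, chains written on editions before 2024 are reported via
+/// `errors::LetChainPre2024` (see `CondChecker` below).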
+pub enum LetChainsPolicy {
+    AlwaysAllowed,
+    EditionDependent { current_edition: Edition },
+}
+
+/// Visitor to check for invalid use of `ExprKind::Let` that can't
+/// easily be caught in parsing. For example:
+///
+/// ```rust,ignore (example)
+/// // Only know that the let isn't allowed once the `||` token is reached
+/// if let Some(x) = y || true {}
+/// // Only know that the let isn't allowed once the second `=` token is reached.
+/// if let Some(x) = y && z = 1 {}
+/// ```
+struct CondChecker<'a> {
+    parser: &'a Parser<'a>,
+    let_chains_policy: LetChainsPolicy,
+    depth: u32,
+    forbid_let_reason: Option<ForbiddenLetReason>,
+    missing_let: Option<errors::MaybeMissingLet>,
+    comparison: Option<errors::MaybeComparison>,
+}
+
+impl<'a> CondChecker<'a> {
+    fn new(parser: &'a Parser<'a>, let_chains_policy: LetChainsPolicy) -> Self {
+        CondChecker {
+            parser,
+            forbid_let_reason: None,
+            missing_let: None,
+            comparison: None,
+            let_chains_policy,
+            depth: 0,
+        }
+    }
+}
+
+impl MutVisitor for CondChecker<'_> {
+    fn visit_expr(&mut self, e: &mut Expr) {
+        self.depth += 1;
+        use ForbiddenLetReason::*;
+
+        let span = e.span;
+        match e.kind {
+            ExprKind::Let(_, _, _, ref mut recovered @ Recovered::No) => {
+                if let Some(reason) = self.forbid_let_reason {
+                    let error = match reason {
+                        NotSupportedOr(or_span) => {
+                            self.parser.dcx().emit_err(errors::OrInLetChain { span: or_span })
+                        }
+                        _ => self.parser.dcx().emit_err(errors::ExpectedExpressionFoundLet {
+                            span,
+                            reason,
+                            missing_let: self.missing_let,
+                            comparison: self.comparison,
+                        }),
+                    };
+                    *recovered = Recovered::Yes(error);
+                } else if self.depth > 1 {
+                    // Top level `let` is always allowed; only gate chains
+                    match self.let_chains_policy {
+                        LetChainsPolicy::AlwaysAllowed => (),
+                        LetChainsPolicy::EditionDependent { current_edition } => {
+                            if !current_edition.at_least_rust_2024() || !span.at_least_rust_2024() {
+                                self.parser.dcx().emit_err(errors::LetChainPre2024 { span });
+                            }
+                        }
+                    }
+                }
+            }
+            ExprKind::Binary(Spanned { node: BinOpKind::And, .. }, _, _) => {
+                mut_visit::walk_expr(self, e);
+            }
+            ExprKind::Binary(Spanned { node: BinOpKind::Or, span: or_span }, _, _)
+                if let None | Some(NotSupportedOr(_)) = self.forbid_let_reason =>
+            {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(NotSupportedOr(or_span));
+                mut_visit::walk_expr(self, e);
+                self.forbid_let_reason = forbid_let_reason;
+            }
+            ExprKind::Paren(ref inner)
+                if let None | Some(NotSupportedParentheses(_)) = self.forbid_let_reason =>
+            {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(NotSupportedParentheses(inner.span));
+                mut_visit::walk_expr(self, e);
+                self.forbid_let_reason = forbid_let_reason;
+            }
+            ExprKind::Assign(ref lhs, _, span) => {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(OtherForbidden);
+                let missing_let = self.missing_let;
+                if let ExprKind::Binary(_, _, rhs) = &lhs.kind
+                    && let ExprKind::Path(_, _)
+                    | ExprKind::Struct(_)
+                    | ExprKind::Call(_, _)
+                    | ExprKind::Array(_) = rhs.kind
+                {
+                    self.missing_let =
+                        Some(errors::MaybeMissingLet { span: rhs.span.shrink_to_lo() });
+                }
+                let comparison = self.comparison;
+                self.comparison = Some(errors::MaybeComparison { span: span.shrink_to_hi() });
+                mut_visit::walk_expr(self, e);
+                self.forbid_let_reason = forbid_let_reason;
+                self.missing_let = missing_let;
+                self.comparison = comparison;
+            }
+            ExprKind::Unary(_, _)
+            | ExprKind::Await(_, _)
+            | ExprKind::Use(_, _)
+            | ExprKind::AssignOp(_, _, _)
+            | ExprKind::Range(_, _, _)
+            | ExprKind::Try(_)
+            | ExprKind::AddrOf(_, _, _)
+            | ExprKind::Binary(_, _, _)
+            | ExprKind::Field(_, _)
+            | ExprKind::Index(_, _, _)
+            | ExprKind::Call(_, _)
+            | ExprKind::MethodCall(_)
+            | ExprKind::Tup(_)
+            | ExprKind::Paren(_) => {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(OtherForbidden);
+                mut_visit::walk_expr(self, e);
+                self.forbid_let_reason = forbid_let_reason;
+            }
+            ExprKind::Cast(ref mut op, _)
+            | ExprKind::Type(ref mut op, _)
+            | ExprKind::UnsafeBinderCast(_, ref mut op, _) => {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(OtherForbidden);
+                self.visit_expr(op);
+                self.forbid_let_reason = forbid_let_reason;
+            }
+            ExprKind::Let(_, _, _, Recovered::Yes(_))
+            | ExprKind::Array(_)
+            | ExprKind::ConstBlock(_)
+            | ExprKind::Lit(_)
+            | ExprKind::If(_, _, _)
+            | ExprKind::While(_, _, _)
+            | ExprKind::ForLoop { .. }
+            | ExprKind::Loop(_, _, _)
+            | ExprKind::Match(_, _, _)
+            | ExprKind::Closure(_)
+            | ExprKind::Block(_, _)
+            | ExprKind::Gen(_, _, _, _)
+            | ExprKind::TryBlock(_)
+            | ExprKind::Underscore
+            | ExprKind::Path(_, _)
+            | ExprKind::Break(_, _)
+            | ExprKind::Continue(_)
+            | ExprKind::Ret(_)
+            | ExprKind::InlineAsm(_)
+            | ExprKind::OffsetOf(_, _)
+            | ExprKind::MacCall(_)
+            | ExprKind::Struct(_)
+            | ExprKind::Repeat(_, _)
+            | ExprKind::Yield(_)
+            | ExprKind::Yeet(_)
+            | ExprKind::Become(_)
+            | ExprKind::IncludedBytes(_)
+            | ExprKind::FormatArgs(_)
+            | ExprKind::Err(_)
+            | ExprKind::Dummy => {
+                // These already forbid any `let` expressions they contain.
+            }
+        }
+        self.depth -= 1;
+    }
+}
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
new file mode 100644
index 00000000000..86326341a75
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -0,0 +1,578 @@
+use rustc_ast::{
+    self as ast, AttrVec, DUMMY_NODE_ID, GenericBounds, GenericParam, GenericParamKind, TyKind,
+    WhereClause, token,
+};
+use rustc_errors::{Applicability, PResult};
+use rustc_span::{Ident, Span, kw, sym};
+use thin_vec::ThinVec;
+
+use super::{ForceCollect, Parser, Trailing, UsePreAttrPos};
+use crate::errors::{
+    self, MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
+    UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
+    WhereClauseBeforeTupleStructBodySugg,
+};
+use crate::exp;
+
+enum PredicateKindOrStructBody {
+    PredicateKind(ast::WherePredicateKind),
+    StructBody(ThinVec<ast::FieldDef>),
+}
+
+impl<'a> Parser<'a> {
+    /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
+    ///
+    /// ```text
+    /// BOUND = LT_BOUND (e.g., `'a`)
+    /// ```
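+    ///
+    /// For instance (illustrative), in
+    ///
+    /// ```text
+    /// fn f<'a: 'b + 'c, 'b, 'c>() {}
+    /// ```
+    ///
+    /// the `'b + 'c` part is parsed here, after the caller has already eaten the `:`.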
+    fn parse_lt_param_bounds(&mut self) -> GenericBounds {
+        let mut lifetimes = Vec::new();
+        while self.check_lifetime() {
+            lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime()));
+
+            if !self.eat_plus() {
+                break;
+            }
+        }
+        lifetimes
+    }
+
+    /// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
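+    ///
+    /// For example (illustrative), this parses the parameter in
+    /// `struct S<T: Clone + Default = u32>(T);`, where both the bounds and the
+    /// `= u32` default are optional.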
+    fn parse_ty_param(&mut self, preceding_attrs: AttrVec) -> PResult<'a, GenericParam> {
+        let ident = self.parse_ident()?;
+
+        // We might have a typo'd `Const` that was parsed as a type parameter.
+        if self.may_recover()
+            && ident.name.as_str().to_ascii_lowercase() == kw::Const.as_str()
+            && self.check_ident()
+        // `Const` followed by IDENT
+        {
+            return self.recover_const_param_with_mistyped_const(preceding_attrs, ident);
+        }
+
+        // Parse optional colon and param bounds.
+        let mut colon_span = None;
+        let bounds = if self.eat(exp!(Colon)) {
+            colon_span = Some(self.prev_token.span);
+            // recover from `impl Trait` in type param bound
+            if self.token.is_keyword(kw::Impl) {
+                let impl_span = self.token.span;
+                let snapshot = self.create_snapshot_for_diagnostic();
+                match self.parse_ty() {
+                    Ok(p) => {
+                        if let TyKind::ImplTrait(_, bounds) = &p.kind {
+                            let span = impl_span.to(self.token.span.shrink_to_lo());
+                            let mut err = self.dcx().struct_span_err(
+                                span,
+                                "expected trait bound, found `impl Trait` type",
+                            );
+                            err.span_label(span, "not a trait");
+                            if let [bound, ..] = &bounds[..] {
+                                err.span_suggestion_verbose(
+                                    impl_span.until(bound.span()),
+                                    "use the trait bounds directly",
+                                    String::new(),
+                                    Applicability::MachineApplicable,
+                                );
+                            }
+                            return Err(err);
+                        }
+                    }
+                    Err(err) => {
+                        err.cancel();
+                    }
+                }
+                self.restore_snapshot(snapshot);
+            }
+            self.parse_generic_bounds()?
+        } else {
+            Vec::new()
+        };
+
+        let default = if self.eat(exp!(Eq)) { Some(self.parse_ty()?) } else { None };
+        Ok(GenericParam {
+            ident,
+            id: ast::DUMMY_NODE_ID,
+            attrs: preceding_attrs,
+            bounds,
+            kind: GenericParamKind::Type { default },
+            is_placeholder: false,
+            colon_span,
+        })
+    }
+
+    pub(crate) fn parse_const_param(
+        &mut self,
+        preceding_attrs: AttrVec,
+    ) -> PResult<'a, GenericParam> {
+        let const_span = self.token.span;
+
+        self.expect_keyword(exp!(Const))?;
+        let ident = self.parse_ident()?;
+        self.expect(exp!(Colon))?;
+        let ty = self.parse_ty()?;
+
+        // Parse optional const generics default value.
+        let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };
+        let span = if let Some(ref default) = default {
+            const_span.to(default.value.span)
+        } else {
+            const_span.to(ty.span)
+        };
+
+        Ok(GenericParam {
+            ident,
+            id: ast::DUMMY_NODE_ID,
+            attrs: preceding_attrs,
+            bounds: Vec::new(),
+            kind: GenericParamKind::Const { ty, span, default },
+            is_placeholder: false,
+            colon_span: None,
+        })
+    }
+
+    pub(crate) fn recover_const_param_with_mistyped_const(
+        &mut self,
+        preceding_attrs: AttrVec,
+        mistyped_const_ident: Ident,
+    ) -> PResult<'a, GenericParam> {
+        let ident = self.parse_ident()?;
+        self.expect(exp!(Colon))?;
+        let ty = self.parse_ty()?;
+
+        // Parse optional const generics default value.
+        let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };
+        let span = if let Some(ref default) = default {
+            mistyped_const_ident.span.to(default.value.span)
+        } else {
+            mistyped_const_ident.span.to(ty.span)
+        };
+
+        self.dcx()
+            .struct_span_err(
+                mistyped_const_ident.span,
+                format!("`const` keyword was mistyped as `{}`", mistyped_const_ident.as_str()),
+            )
+            .with_span_suggestion_verbose(
+                mistyped_const_ident.span,
+                "use the `const` keyword",
+                kw::Const,
+                Applicability::MachineApplicable,
+            )
+            .emit();
+
+        Ok(GenericParam {
+            ident,
+            id: ast::DUMMY_NODE_ID,
+            attrs: preceding_attrs,
+            bounds: Vec::new(),
+            kind: GenericParamKind::Const { ty, span, default },
+            is_placeholder: false,
+            colon_span: None,
+        })
+    }
+
+    /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
+    /// a trailing comma and erroneous trailing attributes.
+    pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::GenericParam>> {
+        let mut params = ThinVec::new();
+        let mut done = false;
+        while !done {
+            let attrs = self.parse_outer_attributes()?;
+            let param = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+                if this.eat_keyword_noexpect(kw::SelfUpper) {
+                    // `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
+                    // as if `Self` never existed.
+                    this.dcx()
+                        .emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span });
+
+                    // Eat a trailing comma, if it exists.
+                    let _ = this.eat(exp!(Comma));
+                }
+
+                let param = if this.check_lifetime() {
+                    let lifetime = this.expect_lifetime();
+                    // Parse lifetime parameter.
+                    let (colon_span, bounds) = if this.eat(exp!(Colon)) {
+                        (Some(this.prev_token.span), this.parse_lt_param_bounds())
+                    } else {
+                        (None, Vec::new())
+                    };
+
+                    if this.check_noexpect(&token::Eq) && this.look_ahead(1, |t| t.is_lifetime()) {
+                        let lo = this.token.span;
+                        // Parse `= 'lifetime`.
+                        this.bump(); // `=`
+                        this.bump(); // `'lifetime`
+                        let span = lo.to(this.prev_token.span);
+                        this.dcx().emit_err(UnexpectedDefaultValueForLifetimeInGenericParameters {
+                            span,
+                        });
+                    }
+
+                    Some(ast::GenericParam {
+                        ident: lifetime.ident,
+                        id: lifetime.id,
+                        attrs,
+                        bounds,
+                        kind: ast::GenericParamKind::Lifetime,
+                        is_placeholder: false,
+                        colon_span,
+                    })
+                } else if this.check_keyword(exp!(Const)) {
+                    // Parse const parameter.
+                    Some(this.parse_const_param(attrs)?)
+                } else if this.check_ident() {
+                    // Parse type parameter.
+                    Some(this.parse_ty_param(attrs)?)
+                } else if this.token.can_begin_type() {
+                    // Trying to write an associated type bound? (#26271)
+                    let snapshot = this.create_snapshot_for_diagnostic();
+                    let lo = this.token.span;
+                    match this.parse_ty_where_predicate_kind() {
+                        Ok(_) => {
+                            this.dcx().emit_err(errors::BadAssocTypeBounds {
+                                span: lo.to(this.prev_token.span),
+                            });
+                            // FIXME - try to continue parsing other generics?
+                        }
+                        Err(err) => {
+                            err.cancel();
+                            // FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
+                            this.restore_snapshot(snapshot);
+                        }
+                    }
+                    return Ok((None, Trailing::No, UsePreAttrPos::No));
+                } else {
+                    // Check for trailing attributes and stop parsing.
+                    if !attrs.is_empty() {
+                        if !params.is_empty() {
+                            this.dcx().emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
+                        } else {
+                            this.dcx()
+                                .emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
+                        }
+                    }
+                    return Ok((None, Trailing::No, UsePreAttrPos::No));
+                };
+
+                if !this.eat(exp!(Comma)) {
+                    done = true;
+                }
+                // We just ate the comma, so no need to capture the trailing token.
+                Ok((param, Trailing::No, UsePreAttrPos::No))
+            })?;
+
+            if let Some(param) = param {
+                params.push(param);
+            } else {
+                break;
+            }
+        }
+        Ok(params)
+    }
+
+    /// Parses a set of optional generic type parameter declarations. Where
+    /// clauses are not parsed here, and must be added later via
+    /// `parse_where_clause()`.
+    ///
+    /// ```text
+    /// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
+    ///                  | ( < lifetimes , typaramseq ( , )? > )
+    /// where   typaramseq = ( typaram ) | ( typaram , typaramseq )
+    /// ```
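+    ///
+    /// For example (illustrative), all of `<>`, `<'a>`, `<'a, T: Trait>` and
+    /// `<T, const N: usize>` are accepted here.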
+    pub(super) fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
+        // invalid path separator `::` in function definition
+        // for example `fn invalid_path_separator::<T>() {}`
+        if self.eat_noexpect(&token::PathSep) {
+            self.dcx()
+                .emit_err(errors::InvalidPathSepInFnDefinition { span: self.prev_token.span });
+        }
+
+        let span_lo = self.token.span;
+        let (params, span) = if self.eat_lt() {
+            let params = self.parse_generic_params()?;
+            self.expect_gt_or_maybe_suggest_closing_generics(&params)?;
+            (params, span_lo.to(self.prev_token.span))
+        } else {
+            (ThinVec::new(), self.prev_token.span.shrink_to_hi())
+        };
+        Ok(ast::Generics {
+            params,
+            where_clause: WhereClause {
+                has_where_token: false,
+                predicates: ThinVec::new(),
+                span: self.prev_token.span.shrink_to_hi(),
+            },
+            span,
+        })
+    }
+
+    /// Parses an experimental fn contract
+    /// (`contract_requires(WWW) contract_ensures(ZZZ)`)
+    pub(super) fn parse_contract(
+        &mut self,
+    ) -> PResult<'a, Option<rustc_ast::ptr::P<ast::FnContract>>> {
+        let requires = if self.eat_keyword_noexpect(exp!(ContractRequires).kw) {
+            self.psess.gated_spans.gate(sym::contracts_internals, self.prev_token.span);
+            let precond = self.parse_expr()?;
+            Some(precond)
+        } else {
+            None
+        };
+        let ensures = if self.eat_keyword_noexpect(exp!(ContractEnsures).kw) {
+            self.psess.gated_spans.gate(sym::contracts_internals, self.prev_token.span);
+            let postcond = self.parse_expr()?;
+            Some(postcond)
+        } else {
+            None
+        };
+        if requires.is_none() && ensures.is_none() {
+            Ok(None)
+        } else {
+            Ok(Some(rustc_ast::ptr::P(ast::FnContract { requires, ensures })))
+        }
+    }
+
+    /// Parses an optional where-clause.
+    ///
+    /// ```ignore (only-for-syntax-highlight)
+    /// where T : Trait<U, V> + 'b, 'a : 'b
+    /// ```
+    pub(super) fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
+        self.parse_where_clause_common(None).map(|(clause, _)| clause)
+    }
+
+    pub(super) fn parse_struct_where_clause(
+        &mut self,
+        struct_name: Ident,
+        body_insertion_point: Span,
+    ) -> PResult<'a, (WhereClause, Option<ThinVec<ast::FieldDef>>)> {
+        self.parse_where_clause_common(Some((struct_name, body_insertion_point)))
+    }
+
+    fn parse_where_clause_common(
+        &mut self,
+        struct_: Option<(Ident, Span)>,
+    ) -> PResult<'a, (WhereClause, Option<ThinVec<ast::FieldDef>>)> {
+        let mut where_clause = WhereClause {
+            has_where_token: false,
+            predicates: ThinVec::new(),
+            span: self.prev_token.span.shrink_to_hi(),
+        };
+        let mut tuple_struct_body = None;
+
+        if !self.eat_keyword(exp!(Where)) {
+            return Ok((where_clause, None));
+        }
+
+        if self.eat_noexpect(&token::Colon) {
+            let colon_span = self.prev_token.span;
+            self.dcx()
+                .struct_span_err(colon_span, "unexpected colon after `where`")
+                .with_span_suggestion_short(
+                    colon_span,
+                    "remove the colon",
+                    "",
+                    Applicability::MachineApplicable,
+                )
+                .emit();
+        }
+
+        where_clause.has_where_token = true;
+        let where_lo = self.prev_token.span;
+
+        // We are considering adding generics to the `where` keyword as an alternative higher-rank
+        // parameter syntax (as in `where<'a>` or `where<T>`). To avoid that being a breaking
+        // change, we parse those generics now, but report an error.
+        if self.choose_generics_over_qpath(0) {
+            let generics = self.parse_generics()?;
+            self.dcx().emit_err(errors::WhereOnGenerics { span: generics.span });
+        }
+
+        loop {
+            let where_sp = where_lo.to(self.prev_token.span);
+            let attrs = self.parse_outer_attributes()?;
+            let pred_lo = self.token.span;
+            let predicate = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+                for attr in &attrs {
+                    self.psess.gated_spans.gate(sym::where_clause_attrs, attr.span);
+                }
+                let kind = if this.check_lifetime() && this.look_ahead(1, |t| !t.is_like_plus()) {
+                    let lifetime = this.expect_lifetime();
+                    // Bounds starting with a colon are mandatory, but possibly empty.
+                    this.expect(exp!(Colon))?;
+                    let bounds = this.parse_lt_param_bounds();
+                    Some(ast::WherePredicateKind::RegionPredicate(ast::WhereRegionPredicate {
+                        lifetime,
+                        bounds,
+                    }))
+                } else if this.check_type() {
+                    match this.parse_ty_where_predicate_kind_or_recover_tuple_struct_body(
+                        struct_, pred_lo, where_sp,
+                    )? {
+                        PredicateKindOrStructBody::PredicateKind(kind) => Some(kind),
+                        PredicateKindOrStructBody::StructBody(body) => {
+                            tuple_struct_body = Some(body);
+                            None
+                        }
+                    }
+                } else {
+                    None
+                };
+                let predicate = kind.map(|kind| ast::WherePredicate {
+                    attrs,
+                    kind,
+                    id: DUMMY_NODE_ID,
+                    span: pred_lo.to(this.prev_token.span),
+                    is_placeholder: false,
+                });
+                Ok((predicate, Trailing::No, UsePreAttrPos::No))
+            })?;
+            match predicate {
+                Some(predicate) => where_clause.predicates.push(predicate),
+                None => break,
+            }
+
+            let prev_token = self.prev_token.span;
+            let ate_comma = self.eat(exp!(Comma));
+
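+            // e.g. (illustrative) a second `where` written where a comma was intended:
+            // `fn f<T>() where T: Copy where T: Clone {}`.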
+            if self.eat_keyword_noexpect(kw::Where) {
+                self.dcx().emit_err(MultipleWhereClauses {
+                    span: self.token.span,
+                    previous: pred_lo,
+                    between: prev_token.shrink_to_hi().to(self.prev_token.span),
+                });
+            } else if !ate_comma {
+                break;
+            }
+        }
+
+        where_clause.span = where_lo.to(self.prev_token.span);
+        Ok((where_clause, tuple_struct_body))
+    }
+
+    fn parse_ty_where_predicate_kind_or_recover_tuple_struct_body(
+        &mut self,
+        struct_: Option<(Ident, Span)>,
+        pred_lo: Span,
+        where_sp: Span,
+    ) -> PResult<'a, PredicateKindOrStructBody> {
+        let mut snapshot = None;
+
+        if let Some(struct_) = struct_
+            && self.may_recover()
+            && self.token == token::OpenParen
+        {
+            snapshot = Some((struct_, self.create_snapshot_for_diagnostic()));
+        };
+
+        match self.parse_ty_where_predicate_kind() {
+            Ok(pred) => Ok(PredicateKindOrStructBody::PredicateKind(pred)),
+            Err(type_err) => {
+                let Some(((struct_name, body_insertion_point), mut snapshot)) = snapshot else {
+                    return Err(type_err);
+                };
+
+                // Check if we might have encountered an out of place tuple struct body.
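+                // An illustrative input for this recovery:
+                //
+                //     struct S<T> where T: Copy, (T);
+                //
+                // which should have been `struct S<T>(T) where T: Copy;`.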
+                match snapshot.parse_tuple_struct_body() {
+                    // Since we don't know the exact reason why we failed to parse the
+                    // predicate (we might have stumbled upon something bogus like `(T): ?`),
+                    // employ a simple heuristic to weed out some pathological cases:
+                    // Look for a semicolon (strong indicator) or anything that might mark
+                    // the end of the item (weak indicator) following the body.
+                    Ok(body)
+                        if matches!(snapshot.token.kind, token::Semi | token::Eof)
+                            || snapshot.token.can_begin_item() =>
+                    {
+                        type_err.cancel();
+
+                        let body_sp = pred_lo.to(snapshot.prev_token.span);
+                        let map = self.psess.source_map();
+
+                        self.dcx().emit_err(WhereClauseBeforeTupleStructBody {
+                            span: where_sp,
+                            name: struct_name.span,
+                            body: body_sp,
+                            sugg: map.span_to_snippet(body_sp).ok().map(|body| {
+                                WhereClauseBeforeTupleStructBodySugg {
+                                    left: body_insertion_point.shrink_to_hi(),
+                                    snippet: body,
+                                    right: map.end_point(where_sp).to(body_sp),
+                                }
+                            }),
+                        });
+
+                        self.restore_snapshot(snapshot);
+                        Ok(PredicateKindOrStructBody::StructBody(body))
+                    }
+                    Ok(_) => Err(type_err),
+                    Err(body_err) => {
+                        body_err.cancel();
+                        Err(type_err)
+                    }
+                }
+            }
+        }
+    }
+
+    fn parse_ty_where_predicate_kind(&mut self) -> PResult<'a, ast::WherePredicateKind> {
+        // Parse optional `for<'a, 'b>`.
+        // This `for` is parsed greedily and applies to the whole predicate,
+        // the bounded type can have its own `for` applying only to it.
+        // Examples:
+        // * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
+        // * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
+        // * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
+        let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
+
+        // Parse type with mandatory colon and (possibly empty) bounds,
+        // or with mandatory equality sign and the second type.
+        let ty = self.parse_ty_for_where_clause()?;
+        if self.eat(exp!(Colon)) {
+            let bounds = self.parse_generic_bounds()?;
+            Ok(ast::WherePredicateKind::BoundPredicate(ast::WhereBoundPredicate {
+                bound_generic_params: lifetime_defs,
+                bounded_ty: ty,
+                bounds,
+            }))
+        // FIXME: Decide what should be used here, `=` or `==`.
+        // FIXME: We are just dropping the binders in lifetime_defs on the floor here.
+        } else if self.eat(exp!(Eq)) || self.eat(exp!(EqEq)) {
+            let rhs_ty = self.parse_ty()?;
+            Ok(ast::WherePredicateKind::EqPredicate(ast::WhereEqPredicate { lhs_ty: ty, rhs_ty }))
+        } else {
+            self.maybe_recover_bounds_doubled_colon(&ty)?;
+            self.unexpected_any()
+        }
+    }
+
+    pub(super) fn choose_generics_over_qpath(&self, start: usize) -> bool {
+        // There's an ambiguity between generic parameters and qualified paths in impls.
+        // If we see `<` it may start both, so we have to inspect some following tokens.
+        // The following combinations can only start generics,
+        // but not qualified paths (with one exception):
+        //     `<` `>` - empty generic parameters
+        //     `<` `#` - generic parameters with attributes
+        //     `<` (LIFETIME|IDENT) `>` - single generic parameter
+        //     `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
+        //     `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
+        //     `<` (LIFETIME|IDENT) `=` - generic parameter with a default
+        //     `<` const                - generic const parameter
+        //     `<` IDENT `?`            - RECOVERY for `impl<T ?Bound` missing a `:`, meant to
+        //                                avoid the `T?` to `Option<T>` recovery for types.
+        // The only truly ambiguous case is
+        //     `<` IDENT `>` `::` IDENT ...
+        // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
+        // because this is what is almost always expected in practice; qualified paths in impls
+        // (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment.
+        self.look_ahead(start, |t| t == &token::Lt)
+            && (self.look_ahead(start + 1, |t| t == &token::Pound || t == &token::Gt)
+                || self.look_ahead(start + 1, |t| t.is_lifetime() || t.is_ident())
+                    && self.look_ahead(start + 2, |t| {
+                        matches!(t.kind, token::Gt | token::Comma | token::Colon | token::Eq)
+                        // Recovery-only branch -- this could be removed,
+                        // since it only affects diagnostics currently.
+                            || t.kind == token::Question
+                    })
+                || self.is_keyword_ahead(start + 1, &[kw::Const]))
+    }
+}
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
new file mode 100644
index 00000000000..cb7c5649433
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -0,0 +1,3272 @@
+use std::fmt::Write;
+use std::mem;
+
+use ast::token::IdentIsRaw;
+use rustc_ast::ast::*;
+use rustc_ast::ptr::P;
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, TokenKind};
+use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use rustc_ast::util::case::Case;
+use rustc_ast::{self as ast};
+use rustc_ast_pretty::pprust;
+use rustc_errors::codes::*;
+use rustc_errors::{Applicability, PResult, StashKey, struct_span_code_err};
+use rustc_span::edit_distance::edit_distance;
+use rustc_span::edition::Edition;
+use rustc_span::{DUMMY_SP, ErrorGuaranteed, Ident, Span, Symbol, kw, source_map, sym};
+use thin_vec::{ThinVec, thin_vec};
+use tracing::debug;
+
+use super::diagnostics::{ConsumeClosingDelim, dummy_arg};
+use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
+use super::{
+    AttrWrapper, ExpKeywordPair, ExpTokenPair, FollowedByType, ForceCollect, Parser, PathStyle,
+    Recovered, Trailing, UsePreAttrPos,
+};
+use crate::errors::{self, FnPointerCannotBeAsync, FnPointerCannotBeConst, MacroExpandsToAdtField};
+use crate::{exp, fluent_generated as fluent};
+
+impl<'a> Parser<'a> {
+    /// Parses a source module as a crate. This is the main entry point for the parser.
+    pub fn parse_crate_mod(&mut self) -> PResult<'a, ast::Crate> {
+        let (attrs, items, spans) = self.parse_mod(exp!(Eof))?;
+        Ok(ast::Crate { attrs, items, spans, id: DUMMY_NODE_ID, is_placeholder: false })
+    }
+
+    /// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
+    fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemKind> {
+        let safety = self.parse_safety(Case::Sensitive);
+        self.expect_keyword(exp!(Mod))?;
+        let ident = self.parse_ident()?;
+        let mod_kind = if self.eat(exp!(Semi)) {
+            ModKind::Unloaded
+        } else {
+            self.expect(exp!(OpenBrace))?;
+            let (inner_attrs, items, inner_span) = self.parse_mod(exp!(CloseBrace))?;
+            attrs.extend(inner_attrs);
+            ModKind::Loaded(items, Inline::Yes, inner_span, Ok(()))
+        };
+        Ok(ItemKind::Mod(safety, ident, mod_kind))
+    }
+
+    /// Parses the contents of a module (inner attributes followed by module items).
+    /// We exit once we hit `term` which can be either
+    /// - EOF (for files)
+    /// - `}` for mod items
+    pub fn parse_mod(
+        &mut self,
+        term: ExpTokenPair<'_>,
+    ) -> PResult<'a, (AttrVec, ThinVec<P<Item>>, ModSpans)> {
+        let lo = self.token.span;
+        let attrs = self.parse_inner_attributes()?;
+
+        let post_attr_lo = self.token.span;
+        let mut items: ThinVec<P<_>> = ThinVec::new();
+
+        // There shouldn't be any stray semicolons before or after items.
+        // `parse_item` consumes the appropriate semicolons so any leftover is an error.
+        loop {
+            while self.maybe_consume_incorrect_semicolon(items.last().map(|x| &**x)) {} // Eat all bad semicolons
+            let Some(item) = self.parse_item(ForceCollect::No)? else {
+                break;
+            };
+            items.push(item);
+        }
+
+        if !self.eat(term) {
+            let token_str = super::token_descr(&self.token);
+            if !self.maybe_consume_incorrect_semicolon(items.last().map(|x| &**x)) {
+                let is_let = self.token.is_keyword(kw::Let);
+                let is_let_mut = is_let && self.look_ahead(1, |t| t.is_keyword(kw::Mut));
+                let let_has_ident = is_let && !is_let_mut && self.is_kw_followed_by_ident(kw::Let);
+
+                let msg = format!("expected item, found {token_str}");
+                let mut err = self.dcx().struct_span_err(self.token.span, msg);
+
+                let label = if is_let {
+                    "`let` cannot be used for global variables"
+                } else {
+                    "expected item"
+                };
+                err.span_label(self.token.span, label);
+
+                if is_let {
+                    if is_let_mut {
+                        err.help("consider using `static` and a `Mutex` instead of `let mut`");
+                    } else if let_has_ident {
+                        err.span_suggestion_short(
+                            self.token.span,
+                            "consider using `static` or `const` instead of `let`",
+                            "static",
+                            Applicability::MaybeIncorrect,
+                        );
+                    } else {
+                        err.help("consider using `static` or `const` instead of `let`");
+                    }
+                }
+                err.note("for a full list of items that can appear in modules, see <https://doc.rust-lang.org/reference/items.html>");
+                return Err(err);
+            }
+        }
+
+        let inject_use_span = post_attr_lo.data().with_hi(post_attr_lo.lo());
+        let mod_spans = ModSpans { inner_span: lo.to(self.prev_token.span), inject_use_span };
+        Ok((attrs, items, mod_spans))
+    }
+}
+
+impl<'a> Parser<'a> {
+    pub fn parse_item(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<P<Item>>> {
+        let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
+        self.parse_item_(fn_parse_mode, force_collect).map(|i| i.map(P))
+    }
+
+    fn parse_item_(
+        &mut self,
+        fn_parse_mode: FnParseMode,
+        force_collect: ForceCollect,
+    ) -> PResult<'a, Option<Item>> {
+        self.recover_vcs_conflict_marker();
+        let attrs = self.parse_outer_attributes()?;
+        self.recover_vcs_conflict_marker();
+        self.parse_item_common(attrs, true, false, fn_parse_mode, force_collect)
+    }
+
+    pub(super) fn parse_item_common(
+        &mut self,
+        attrs: AttrWrapper,
+        mac_allowed: bool,
+        attrs_allowed: bool,
+        fn_parse_mode: FnParseMode,
+        force_collect: ForceCollect,
+    ) -> PResult<'a, Option<Item>> {
+        if let Some(item) =
+            self.eat_metavar_seq(MetaVarKind::Item, |this| this.parse_item(ForceCollect::Yes))
+        {
+            let mut item = item.expect("an actual item");
+            attrs.prepend_to_nt_inner(&mut item.attrs);
+            return Ok(Some(*item));
+        }
+
+        self.collect_tokens(None, attrs, force_collect, |this, mut attrs| {
+            let lo = this.token.span;
+            let vis = this.parse_visibility(FollowedByType::No)?;
+            let mut def = this.parse_defaultness();
+            let kind = this.parse_item_kind(
+                &mut attrs,
+                mac_allowed,
+                lo,
+                &vis,
+                &mut def,
+                fn_parse_mode,
+                Case::Sensitive,
+            )?;
+            if let Some(kind) = kind {
+                this.error_on_unconsumed_default(def, &kind);
+                let span = lo.to(this.prev_token.span);
+                let id = DUMMY_NODE_ID;
+                let item = Item { attrs, id, kind, vis, span, tokens: None };
+                return Ok((Some(item), Trailing::No, UsePreAttrPos::No));
+            }
+
+            // At this point, we have failed to parse an item.
+            if !matches!(vis.kind, VisibilityKind::Inherited) {
+                this.dcx().emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis });
+            }
+
+            if let Defaultness::Default(span) = def {
+                this.dcx().emit_err(errors::DefaultNotFollowedByItem { span });
+            }
+
+            if !attrs_allowed {
+                this.recover_attrs_no_item(&attrs)?;
+            }
+            Ok((None, Trailing::No, UsePreAttrPos::No))
+        })
+    }
+
+    /// Errors in case `default` was parsed in an inappropriate context.
+    fn error_on_unconsumed_default(&self, def: Defaultness, kind: &ItemKind) {
+        if let Defaultness::Default(span) = def {
+            self.dcx().emit_err(errors::InappropriateDefault {
+                span,
+                article: kind.article(),
+                descr: kind.descr(),
+            });
+        }
+    }
+
+    /// Parses one of the items allowed by the flags.
+    fn parse_item_kind(
+        &mut self,
+        attrs: &mut AttrVec,
+        macros_allowed: bool,
+        lo: Span,
+        vis: &Visibility,
+        def: &mut Defaultness,
+        fn_parse_mode: FnParseMode,
+        case: Case,
+    ) -> PResult<'a, Option<ItemKind>> {
+        let check_pub = def == &Defaultness::Final;
+        let mut def_ = || mem::replace(def, Defaultness::Final);
+
+        let info = if !self.is_use_closure() && self.eat_keyword_case(exp!(Use), case) {
+            self.parse_use_item()?
+        } else if self.check_fn_front_matter(check_pub, case) {
+            // FUNCTION ITEM
+            let (ident, sig, generics, contract, body) =
+                self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?;
+            ItemKind::Fn(Box::new(Fn {
+                defaultness: def_(),
+                ident,
+                sig,
+                generics,
+                contract,
+                body,
+                define_opaque: None,
+            }))
+        } else if self.eat_keyword(exp!(Extern)) {
+            if self.eat_keyword(exp!(Crate)) {
+                // EXTERN CRATE
+                self.parse_item_extern_crate()?
+            } else {
+                // EXTERN BLOCK
+                self.parse_item_foreign_mod(attrs, Safety::Default)?
+            }
+        } else if self.is_unsafe_foreign_mod() {
+            // EXTERN BLOCK
+            let safety = self.parse_safety(Case::Sensitive);
+            self.expect_keyword(exp!(Extern))?;
+            self.parse_item_foreign_mod(attrs, safety)?
+        } else if self.is_static_global() {
+            let safety = self.parse_safety(Case::Sensitive);
+            // STATIC ITEM
+            self.bump(); // `static`
+            let mutability = self.parse_mutability();
+            self.parse_static_item(safety, mutability)?
+        } else if self.check_keyword(exp!(Trait)) || self.check_trait_front_matter() {
+            // TRAIT ITEM
+            self.parse_item_trait(attrs, lo)?
+        } else if let Const::Yes(const_span) = self.parse_constness(Case::Sensitive) {
+            // CONST ITEM
+            if self.token.is_keyword(kw::Impl) {
+                // recover from `const impl`, suggest `impl const`
+                self.recover_const_impl(const_span, attrs, def_())?
+            } else {
+                self.recover_const_mut(const_span);
+                self.recover_missing_kw_before_item()?;
+                let (ident, generics, ty, expr) = self.parse_const_item()?;
+                ItemKind::Const(Box::new(ConstItem {
+                    defaultness: def_(),
+                    ident,
+                    generics,
+                    ty,
+                    expr,
+                    define_opaque: None,
+                }))
+            }
+        } else if self.check_keyword(exp!(Impl))
+            || self.check_keyword(exp!(Unsafe)) && self.is_keyword_ahead(1, &[kw::Impl])
+        {
+            // IMPL ITEM
+            self.parse_item_impl(attrs, def_())?
+        } else if self.is_reuse_path_item() {
+            self.parse_item_delegation()?
+        } else if self.check_keyword(exp!(Mod))
+            || self.check_keyword(exp!(Unsafe)) && self.is_keyword_ahead(1, &[kw::Mod])
+        {
+            // MODULE ITEM
+            self.parse_item_mod(attrs)?
+        } else if self.eat_keyword(exp!(Type)) {
+            // TYPE ITEM
+            self.parse_type_alias(def_())?
+        } else if self.eat_keyword(exp!(Enum)) {
+            // ENUM ITEM
+            self.parse_item_enum()?
+        } else if self.eat_keyword(exp!(Struct)) {
+            // STRUCT ITEM
+            self.parse_item_struct()?
+        } else if self.is_kw_followed_by_ident(kw::Union) {
+            // UNION ITEM
+            self.bump(); // `union`
+            self.parse_item_union()?
+        } else if self.is_builtin() {
+            // BUILTIN# ITEM
+            return self.parse_item_builtin();
+        } else if self.eat_keyword(exp!(Macro)) {
+            // MACROS 2.0 ITEM
+            self.parse_item_decl_macro(lo)?
+        } else if let IsMacroRulesItem::Yes { has_bang } = self.is_macro_rules_item() {
+            // MACRO_RULES ITEM
+            self.parse_item_macro_rules(vis, has_bang)?
+        } else if self.isnt_macro_invocation()
+            && (self.token.is_ident_named(sym::import)
+                || self.token.is_ident_named(sym::using)
+                || self.token.is_ident_named(sym::include)
+                || self.token.is_ident_named(sym::require))
+        {
+            return self.recover_import_as_use();
+        } else if self.isnt_macro_invocation() && vis.kind.is_pub() {
+            self.recover_missing_kw_before_item()?;
+            return Ok(None);
+        } else if self.isnt_macro_invocation() && case == Case::Sensitive {
+            _ = def_;
+
+            // Recover wrong cased keywords
+            return self.parse_item_kind(
+                attrs,
+                macros_allowed,
+                lo,
+                vis,
+                def,
+                fn_parse_mode,
+                Case::Insensitive,
+            );
+        } else if macros_allowed && self.check_path() {
+            if self.isnt_macro_invocation() {
+                self.recover_missing_kw_before_item()?;
+            }
+            // MACRO INVOCATION ITEM
+            ItemKind::MacCall(P(self.parse_item_macro(vis)?))
+        } else {
+            return Ok(None);
+        };
+        Ok(Some(info))
+    }
+
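+    /// Recovers foreign-style imports written as items, e.g. (illustrative)
+    /// `import std::collections::HashMap;`, by reparsing the rest as a `use` item
+    /// and emitting `errors::RecoverImportAsUse`.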
+    fn recover_import_as_use(&mut self) -> PResult<'a, Option<ItemKind>> {
+        let span = self.token.span;
+        let token_name = super::token_descr(&self.token);
+        let snapshot = self.create_snapshot_for_diagnostic();
+        self.bump();
+        match self.parse_use_item() {
+            Ok(u) => {
+                self.dcx().emit_err(errors::RecoverImportAsUse { span, token_name });
+                Ok(Some(u))
+            }
+            Err(e) => {
+                e.cancel();
+                self.restore_snapshot(snapshot);
+                Ok(None)
+            }
+        }
+    }
+
+    fn parse_use_item(&mut self) -> PResult<'a, ItemKind> {
+        let tree = self.parse_use_tree()?;
+        if let Err(mut e) = self.expect_semi() {
+            match tree.kind {
+                UseTreeKind::Glob => {
+                    e.note("the wildcard token must be last on the path");
+                }
+                UseTreeKind::Nested { .. } => {
+                    e.note("glob-like brace syntax must be last on the path");
+                }
+                _ => (),
+            }
+            return Err(e);
+        }
+        Ok(ItemKind::Use(tree))
+    }
+
+    /// When parsing a statement, would the start of a path be an item?
+    pub(super) fn is_path_start_item(&mut self) -> bool {
+        self.is_kw_followed_by_ident(kw::Union) // no: `union::b`, yes: `union U { .. }`
+        || self.is_reuse_path_item()
+        || self.check_trait_front_matter() // no: `auto::b`, yes: `auto trait X { .. }`
+        || self.is_async_fn() // no(2015): `async::b`, yes: `async fn`
+        || matches!(self.is_macro_rules_item(), IsMacroRulesItem::Yes{..}) // no: `macro_rules::b`, yes: `macro_rules! mac`
+    }
+
+    fn is_reuse_path_item(&mut self) -> bool {
+        // no: `reuse ::path` for compatibility reasons with macro invocations
+        self.token.is_keyword(kw::Reuse)
+            && self.look_ahead(1, |t| t.is_path_start() && *t != token::PathSep)
+    }
+
+    /// Are we sure this could not possibly be a macro invocation?
+    fn isnt_macro_invocation(&mut self) -> bool {
+        self.check_ident() && self.look_ahead(1, |t| *t != token::Bang && *t != token::PathSep)
+    }
+
+    /// Recovers on encountering a struct, enum, or method definition where the user
+    /// forgot to add the `struct`, `enum`, or `fn` keyword.
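+    ///
+    /// Illustrative inputs that reach this recovery:
+    ///
+    /// ```text
+    /// pub Foo { x: u8 }            // missing `struct`
+    /// pub bar(x: u8) -> u8 { x }   // missing `fn`
+    /// ```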
+    fn recover_missing_kw_before_item(&mut self) -> PResult<'a, ()> {
+        let is_pub = self.prev_token.is_keyword(kw::Pub);
+        let is_const = self.prev_token.is_keyword(kw::Const);
+        let ident_span = self.token.span;
+        let span = if is_pub { self.prev_token.span.to(ident_span) } else { ident_span };
+        let insert_span = ident_span.shrink_to_lo();
+
+        let ident = if self.token.is_ident()
+            && (!is_const || self.look_ahead(1, |t| *t == token::OpenParen))
+            && self.look_ahead(1, |t| {
+                matches!(t.kind, token::Lt | token::OpenBrace | token::OpenParen)
+            }) {
+            self.parse_ident().unwrap()
+        } else {
+            return Ok(());
+        };
+
+        let mut found_generics = false;
+        if self.check(exp!(Lt)) {
+            found_generics = true;
+            self.eat_to_tokens(&[exp!(Gt)]);
+            self.bump(); // `>`
+        }
+
+        let err = if self.check(exp!(OpenBrace)) {
+            // possible struct or enum definition where `struct` or `enum` was forgotten
+            if self.look_ahead(1, |t| *t == token::CloseBrace) {
+                // `S {}` could be unit enum or struct
+                Some(errors::MissingKeywordForItemDefinition::EnumOrStruct { span })
+            } else if self.look_ahead(2, |t| *t == token::Colon)
+                || self.look_ahead(3, |t| *t == token::Colon)
+            {
+                // `S { f:` or `S { pub f:`
+                Some(errors::MissingKeywordForItemDefinition::Struct { span, insert_span, ident })
+            } else {
+                Some(errors::MissingKeywordForItemDefinition::Enum { span, insert_span, ident })
+            }
+        } else if self.check(exp!(OpenParen)) {
+            // possible function or tuple struct definition where `fn` or `struct` was forgotten
+            self.bump(); // `(`
+            let is_method = self.recover_self_param();
+
+            self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::Yes);
+
+            let err = if self.check(exp!(RArrow)) || self.check(exp!(OpenBrace)) {
+                self.eat_to_tokens(&[exp!(OpenBrace)]);
+                self.bump(); // `{`
+                self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
+                if is_method {
+                    errors::MissingKeywordForItemDefinition::Method { span, insert_span, ident }
+                } else {
+                    errors::MissingKeywordForItemDefinition::Function { span, insert_span, ident }
+                }
+            } else if is_pub && self.check(exp!(Semi)) {
+                errors::MissingKeywordForItemDefinition::Struct { span, insert_span, ident }
+            } else {
+                errors::MissingKeywordForItemDefinition::Ambiguous {
+                    span,
+                    subdiag: if found_generics {
+                        None
+                    } else if let Ok(snippet) = self.span_to_snippet(ident_span) {
+                        Some(errors::AmbiguousMissingKwForItemSub::SuggestMacro {
+                            span: ident_span,
+                            snippet,
+                        })
+                    } else {
+                        Some(errors::AmbiguousMissingKwForItemSub::HelpMacro)
+                    },
+                }
+            };
+            Some(err)
+        } else if found_generics {
+            Some(errors::MissingKeywordForItemDefinition::Ambiguous { span, subdiag: None })
+        } else {
+            None
+        };
+
+        if let Some(err) = err { Err(self.dcx().create_err(err)) } else { Ok(()) }
+    }
+
+    fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemKind>> {
+        // To be expanded
+        Ok(None)
+    }
+
+    /// Parses an item macro, e.g., `item!();`.
+    fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
+        let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
+        self.expect(exp!(Bang))?; // `!`
+        match self.parse_delim_args() {
+            // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
+            Ok(args) => {
+                self.eat_semi_for_macro_if_needed(&args);
+                self.complain_if_pub_macro(vis, false);
+                Ok(MacCall { path, args })
+            }
+
+            Err(mut err) => {
+                // Maybe the user misspelled `macro_rules` (issue #91227)
+                if self.token.is_ident()
+                    && let [segment] = path.segments.as_slice()
+                    && edit_distance("macro_rules", &segment.ident.to_string(), 2).is_some()
+                {
+                    err.span_suggestion(
+                        path.span,
+                        "perhaps you meant to define a macro",
+                        "macro_rules",
+                        Applicability::MachineApplicable,
+                    );
+                }
+                Err(err)
+            }
+        }
+    }
+
+    /// Recover if we parsed attributes and expected an item but there was none.
+    fn recover_attrs_no_item(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> {
+        let ([start @ end] | [start, .., end]) = attrs else {
+            return Ok(());
+        };
+        let msg = if end.is_doc_comment() {
+            "expected item after doc comment"
+        } else {
+            "expected item after attributes"
+        };
+        let mut err = self.dcx().struct_span_err(end.span, msg);
+        if end.is_doc_comment() {
+            err.span_label(end.span, "this doc comment doesn't document anything");
+        } else if self.token == TokenKind::Semi {
+            err.span_suggestion_verbose(
+                self.token.span,
+                "consider removing this semicolon",
+                "",
+                Applicability::MaybeIncorrect,
+            );
+        }
+        if let [.., penultimate, _] = attrs {
+            err.span_label(start.span.to(penultimate.span), "other attributes here");
+        }
+        Err(err)
+    }
+
+    fn is_async_fn(&self) -> bool {
+        self.token.is_keyword(kw::Async) && self.is_keyword_ahead(1, &[kw::Fn])
+    }
+
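+    /// Parses the impl polarity: a leading `!` yields a negative impl (`impl !Trait for Type`),
+    /// unless the `!` is itself the self type, as in `impl ! { ... }` for the never type.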
+    fn parse_polarity(&mut self) -> ast::ImplPolarity {
+        // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
+        if self.check(exp!(Bang)) && self.look_ahead(1, |t| t.can_begin_type()) {
+            self.bump(); // `!`
+            ast::ImplPolarity::Negative(self.prev_token.span)
+        } else {
+            ast::ImplPolarity::Positive
+        }
+    }
+
+    /// Parses an implementation item.
+    ///
+    /// ```ignore (illustrative)
+    /// impl<'a, T> TYPE { /* impl items */ }
+    /// impl<'a, T> TRAIT for TYPE { /* impl items */ }
+    /// impl<'a, T> !TRAIT for TYPE { /* impl items */ }
+    /// impl<'a, T> const TRAIT for TYPE { /* impl items */ }
+    /// ```
+    ///
+    /// We actually parse a slightly more relaxed grammar for better error reporting and recovery.
+    /// ```ebnf
+    /// "impl" GENERICS "const"? "!"? TYPE "for"? (TYPE | "..") ("where" PREDICATES)? "{" BODY "}"
+    /// "impl" GENERICS "const"? "!"? TYPE ("where" PREDICATES)? "{" BODY "}"
+    /// ```
+    fn parse_item_impl(
+        &mut self,
+        attrs: &mut AttrVec,
+        defaultness: Defaultness,
+    ) -> PResult<'a, ItemKind> {
+        let safety = self.parse_safety(Case::Sensitive);
+        self.expect_keyword(exp!(Impl))?;
+
+        // First, parse generic parameters if necessary.
+        let mut generics = if self.choose_generics_over_qpath(0) {
+            self.parse_generics()?
+        } else {
+            let mut generics = Generics::default();
+            // impl A for B {}
+            //    /\ this is where `generics.span` should point when there are no type params.
+            generics.span = self.prev_token.span.shrink_to_hi();
+            generics
+        };
+
+        let constness = self.parse_constness(Case::Sensitive);
+        if let Const::Yes(span) = constness {
+            self.psess.gated_spans.gate(sym::const_trait_impl, span);
+        }
+
+        // Parse stray `impl async Trait`
+        if (self.token_uninterpolated_span().at_least_rust_2018()
+            && self.token.is_keyword(kw::Async))
+            || self.is_kw_followed_by_ident(kw::Async)
+        {
+            self.bump();
+            self.dcx().emit_err(errors::AsyncImpl { span: self.prev_token.span });
+        }
+
+        let polarity = self.parse_polarity();
+
+        // Parse both types and traits as a type, then reinterpret if necessary.
+        let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt)
+        {
+            let span = self.prev_token.span.between(self.token.span);
+            return Err(self.dcx().create_err(errors::MissingTraitInTraitImpl {
+                span,
+                for_span: span.to(self.token.span),
+            }));
+        } else {
+            self.parse_ty_with_generics_recovery(&generics)?
+        };
+
+        // If `for` is missing we try to recover.
+        let has_for = self.eat_keyword(exp!(For));
+        let missing_for_span = self.prev_token.span.between(self.token.span);
+
+        let ty_second = if self.token == token::DotDot {
+            // We need to report this error after `cfg` expansion for compatibility reasons
+            self.bump(); // `..`, do not add it to expected tokens
+
+            // AST validation later detects this `TyKind::Dummy` and emits an
+            // error. (#121072 will hopefully remove all this special handling
+            // of the obsolete `impl Trait for ..` and then this can go away.)
+            Some(self.mk_ty(self.prev_token.span, TyKind::Dummy))
+        } else if has_for || self.token.can_begin_type() {
+            Some(self.parse_ty()?)
+        } else {
+            None
+        };
+
+        generics.where_clause = self.parse_where_clause()?;
+
+        let impl_items = self.parse_item_list(attrs, |p| p.parse_impl_item(ForceCollect::No))?;
+
+        let (of_trait, self_ty) = match ty_second {
+            Some(ty_second) => {
+                // impl Trait for Type
+                if !has_for {
+                    self.dcx().emit_err(errors::MissingForInTraitImpl { span: missing_for_span });
+                }
+
+                let ty_first = *ty_first;
+                let path = match ty_first.kind {
+                    // This notably includes paths passed through `ty` macro fragments (#46438).
+                    TyKind::Path(None, path) => path,
+                    other => {
+                        if let TyKind::ImplTrait(_, bounds) = other
+                            && let [bound] = bounds.as_slice()
+                            && let GenericBound::Trait(poly_trait_ref) = bound
+                        {
+                            // Suggest removing extra `impl` keyword:
+                            // `impl<T: Default> impl Default for Wrapper<T>`
+                            //                   ^^^^^
+                            let extra_impl_kw = ty_first.span.until(bound.span());
+                            self.dcx().emit_err(errors::ExtraImplKeywordInTraitImpl {
+                                extra_impl_kw,
+                                impl_trait_span: ty_first.span,
+                            });
+                            poly_trait_ref.trait_ref.path.clone()
+                        } else {
+                            return Err(self.dcx().create_err(
+                                errors::ExpectedTraitInTraitImplFoundType { span: ty_first.span },
+                            ));
+                        }
+                    }
+                };
+                let trait_ref = TraitRef { path, ref_id: ty_first.id };
+
+                (Some(trait_ref), ty_second)
+            }
+            None => (None, ty_first), // impl Type
+        };
+        Ok(ItemKind::Impl(Box::new(Impl {
+            safety,
+            polarity,
+            defaultness,
+            constness,
+            generics,
+            of_trait,
+            self_ty,
+            items: impl_items,
+        })))
+    }
+
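+    /// Parses a delegation item introduced by `reuse` (gated behind `fn_delegation`),
+    /// e.g. (illustrative) `reuse to::foo;`, `reuse to::foo as bar { body }`, or list
+    /// forms such as `reuse prefix::{a, b as c};` and `reuse prefix::*;`.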
+    fn parse_item_delegation(&mut self) -> PResult<'a, ItemKind> {
+        let span = self.token.span;
+        self.expect_keyword(exp!(Reuse))?;
+
+        let (qself, path) = if self.eat_lt() {
+            let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+            (Some(qself), path)
+        } else {
+            (None, self.parse_path(PathStyle::Expr)?)
+        };
+
+        let rename = |this: &mut Self| {
+            Ok(if this.eat_keyword(exp!(As)) { Some(this.parse_ident()?) } else { None })
+        };
+        let body = |this: &mut Self| {
+            Ok(if this.check(exp!(OpenBrace)) {
+                Some(this.parse_block()?)
+            } else {
+                this.expect(exp!(Semi))?;
+                None
+            })
+        };
+
+        let item_kind = if self.eat_path_sep() {
+            let suffixes = if self.eat(exp!(Star)) {
+                None
+            } else {
+                let parse_suffix = |p: &mut Self| Ok((p.parse_path_segment_ident()?, rename(p)?));
+                Some(self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), parse_suffix)?.0)
+            };
+            let deleg = DelegationMac { qself, prefix: path, suffixes, body: body(self)? };
+            ItemKind::DelegationMac(Box::new(deleg))
+        } else {
+            let rename = rename(self)?;
+            let ident = rename.unwrap_or_else(|| path.segments.last().unwrap().ident);
+            let deleg = Delegation {
+                id: DUMMY_NODE_ID,
+                qself,
+                path,
+                ident,
+                rename,
+                body: body(self)?,
+                from_glob: false,
+            };
+            ItemKind::Delegation(Box::new(deleg))
+        };
+
+        let span = span.to(self.prev_token.span);
+        self.psess.gated_spans.gate(sym::fn_delegation, span);
+
+        Ok(item_kind)
+    }
+
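+    /// Parses a brace-delimited list of items (the body of an `impl`, `trait`, or
+    /// `extern` block), collecting inner attributes into `attrs`.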
+    fn parse_item_list<T>(
+        &mut self,
+        attrs: &mut AttrVec,
+        mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>,
+    ) -> PResult<'a, ThinVec<T>> {
+        let open_brace_span = self.token.span;
+
+        // Recover `impl Ty;` instead of `impl Ty {}`
+        if self.token == TokenKind::Semi {
+            self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
+            self.bump();
+            return Ok(ThinVec::new());
+        }
+
+        self.expect(exp!(OpenBrace))?;
+        attrs.extend(self.parse_inner_attributes()?);
+
+        let mut items = ThinVec::new();
+        while !self.eat(exp!(CloseBrace)) {
+            if self.recover_doc_comment_before_brace() {
+                continue;
+            }
+            self.recover_vcs_conflict_marker();
+            match parse_item(self) {
+                Ok(None) => {
+                    let mut is_unnecessary_semicolon = !items.is_empty()
+                        // When the close delim is `)` in a case like the following, `token.kind`
+                        // is expected to be `token::CloseParen`, but the actual `token.kind` is
+                        // `token::CloseBrace`. This is because the `token.kind` of the close delim
+                        // is treated as the same as that of the open delim in
+                        // `TokenTreesReader::parse_token_tree`, even if the two delimiters
+                        // differ. Therefore, `token.kind` should not be compared here.
+                        //
+                        // issue-60075.rs
+                        // ```
+                        // trait T {
+                        //     fn qux() -> Option<usize> {
+                        //         let _ = if true {
+                        //         });
+                        //          ^ this close delim
+                        //         Some(4)
+                        //     }
+                        // ```
+                        && self
+                            .span_to_snippet(self.prev_token.span)
+                            .is_ok_and(|snippet| snippet == "}")
+                        && self.token == token::Semi;
+                    let mut semicolon_span = self.token.span;
+                    if !is_unnecessary_semicolon {
+                        // #105369, Detect spurious `;` before assoc fn body
+                        is_unnecessary_semicolon =
+                            self.token == token::OpenBrace && self.prev_token == token::Semi;
+                        semicolon_span = self.prev_token.span;
+                    }
+                    // We have to bail or we'll potentially never make progress.
+                    let non_item_span = self.token.span;
+                    let is_let = self.token.is_keyword(kw::Let);
+
+                    let mut err =
+                        self.dcx().struct_span_err(non_item_span, "non-item in item list");
+                    self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
+                    if is_let {
+                        err.span_suggestion_verbose(
+                            non_item_span,
+                            "consider using `const` instead of `let` for associated const",
+                            "const",
+                            Applicability::MachineApplicable,
+                        );
+                    } else {
+                        err.span_label(open_brace_span, "item list starts here")
+                            .span_label(non_item_span, "non-item starts here")
+                            .span_label(self.prev_token.span, "item list ends here");
+                    }
+                    if is_unnecessary_semicolon {
+                        err.span_suggestion(
+                            semicolon_span,
+                            "consider removing this semicolon",
+                            "",
+                            Applicability::MaybeIncorrect,
+                        );
+                    }
+                    err.emit();
+                    break;
+                }
+                Ok(Some(item)) => items.extend(item),
+                Err(err) => {
+                    self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
+                    err.with_span_label(
+                        open_brace_span,
+                        "while parsing this item list starting here",
+                    )
+                    .with_span_label(self.prev_token.span, "the item list ends here")
+                    .emit();
+                    break;
+                }
+            }
+        }
+        Ok(items)
+    }
+
+    /// Recover on a doc comment before `}`.
+    fn recover_doc_comment_before_brace(&mut self) -> bool {
+        if let token::DocComment(..) = self.token.kind {
+            if self.look_ahead(1, |tok| tok == &token::CloseBrace) {
+                // FIXME: merge with `DocCommentDoesNotDocumentAnything` (E0585)
+                struct_span_code_err!(
+                    self.dcx(),
+                    self.token.span,
+                    E0584,
+                    "found a documentation comment that doesn't document anything",
+                )
+                .with_span_label(self.token.span, "this doc comment doesn't document anything")
+                .with_help(
+                    "doc comments must come before what they document, if a comment was \
+                    intended use `//`",
+                )
+                .emit();
+                self.bump();
+                return true;
+            }
+        }
+        false
+    }
+
+    /// Parses defaultness (i.e., `default` or nothing).
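+    ///
+    /// For illustration, `default` as accepted here on an impl item (hypothetical):
+    /// ```ignore (illustrative)
+    /// impl Trait for Type {
+    ///     default fn method() {}
+    /// }
+    /// ```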
+    fn parse_defaultness(&mut self) -> Defaultness {
+        // We are interested in `default` followed by another identifier.
+        // However, we must avoid keywords that occur as binary operators.
+        // Currently, the only applicable keyword is `as` (`default as Ty`).
+        if self.check_keyword(exp!(Default))
+            && self.look_ahead(1, |t| t.is_non_raw_ident_where(|i| i.name != kw::As))
+        {
+            self.bump(); // `default`
+            Defaultness::Default(self.prev_token_uninterpolated_span())
+        } else {
+            Defaultness::Final
+        }
+    }
+
+    /// Is this an `(const unsafe? auto? | unsafe auto? | auto) trait` item?
+    fn check_trait_front_matter(&mut self) -> bool {
+        // auto trait
+        self.check_keyword(exp!(Auto)) && self.is_keyword_ahead(1, &[kw::Trait])
+            // unsafe auto trait
+            || self.check_keyword(exp!(Unsafe)) && self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
+            || self.check_keyword(exp!(Const))
+                && ((self.is_keyword_ahead(1, &[kw::Trait])
+                    || self.is_keyword_ahead(1, &[kw::Auto])
+                        && self.is_keyword_ahead(2, &[kw::Trait]))
+                    || self.is_keyword_ahead(1, &[kw::Unsafe])
+                        && self.is_keyword_ahead(2, &[kw::Trait, kw::Auto]))
+    }
+
+    /// Parses `const? unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`.
+    fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemKind> {
+        let constness = self.parse_constness(Case::Sensitive);
+        if let Const::Yes(span) = constness {
+            self.psess.gated_spans.gate(sym::const_trait_impl, span);
+        }
+        let safety = self.parse_safety(Case::Sensitive);
+        // Parse optional `auto` prefix.
+        let is_auto = if self.eat_keyword(exp!(Auto)) {
+            self.psess.gated_spans.gate(sym::auto_traits, self.prev_token.span);
+            IsAuto::Yes
+        } else {
+            IsAuto::No
+        };
+
+        self.expect_keyword(exp!(Trait))?;
+        let ident = self.parse_ident()?;
+        let mut generics = self.parse_generics()?;
+
+        // Parse optional colon and supertrait bounds.
+        let had_colon = self.eat(exp!(Colon));
+        let span_at_colon = self.prev_token.span;
+        let bounds = if had_colon { self.parse_generic_bounds()? } else { Vec::new() };
+
+        let span_before_eq = self.prev_token.span;
+        if self.eat(exp!(Eq)) {
+            // It's a trait alias.
+            if had_colon {
+                let span = span_at_colon.to(span_before_eq);
+                self.dcx().emit_err(errors::BoundsNotAllowedOnTraitAliases { span });
+            }
+
+            let bounds = self.parse_generic_bounds()?;
+            generics.where_clause = self.parse_where_clause()?;
+            self.expect_semi()?;
+
+            let whole_span = lo.to(self.prev_token.span);
+            if let Const::Yes(_) = constness {
+                self.dcx().emit_err(errors::TraitAliasCannotBeConst { span: whole_span });
+            }
+            if is_auto == IsAuto::Yes {
+                self.dcx().emit_err(errors::TraitAliasCannotBeAuto { span: whole_span });
+            }
+            if let Safety::Unsafe(_) = safety {
+                self.dcx().emit_err(errors::TraitAliasCannotBeUnsafe { span: whole_span });
+            }
+
+            self.psess.gated_spans.gate(sym::trait_alias, whole_span);
+
+            Ok(ItemKind::TraitAlias(ident, generics, bounds))
+        } else {
+            // It's a normal trait.
+            generics.where_clause = self.parse_where_clause()?;
+            let items = self.parse_item_list(attrs, |p| p.parse_trait_item(ForceCollect::No))?;
+            Ok(ItemKind::Trait(Box::new(Trait {
+                constness,
+                is_auto,
+                safety,
+                ident,
+                generics,
+                bounds,
+                items,
+            })))
+        }
+    }
+
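+    /// Parses an item inside an `impl` block (parameter names and bodies are required).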
+    pub fn parse_impl_item(
+        &mut self,
+        force_collect: ForceCollect,
+    ) -> PResult<'a, Option<Option<P<AssocItem>>>> {
+        let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
+        self.parse_assoc_item(fn_parse_mode, force_collect)
+    }
+
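+    /// Parses an item inside a `trait` definition (bodies are optional; parameter names
+    /// are only required on edition 2018 and later).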
+    pub fn parse_trait_item(
+        &mut self,
+        force_collect: ForceCollect,
+    ) -> PResult<'a, Option<Option<P<AssocItem>>>> {
+        let fn_parse_mode =
+            FnParseMode { req_name: |edition| edition >= Edition::Edition2018, req_body: false };
+        self.parse_assoc_item(fn_parse_mode, force_collect)
+    }
+
+    /// Parses associated items.
+    fn parse_assoc_item(
+        &mut self,
+        fn_parse_mode: FnParseMode,
+        force_collect: ForceCollect,
+    ) -> PResult<'a, Option<Option<P<AssocItem>>>> {
+        Ok(self.parse_item_(fn_parse_mode, force_collect)?.map(
+            |Item { attrs, id, span, vis, kind, tokens }| {
+                let kind = match AssocItemKind::try_from(kind) {
+                    Ok(kind) => kind,
+                    Err(kind) => match kind {
+                        ItemKind::Static(box StaticItem {
+                            ident,
+                            ty,
+                            safety: _,
+                            mutability: _,
+                            expr,
+                            define_opaque,
+                        }) => {
+                            self.dcx().emit_err(errors::AssociatedStaticItemNotAllowed { span });
+                            AssocItemKind::Const(Box::new(ConstItem {
+                                defaultness: Defaultness::Final,
+                                ident,
+                                generics: Generics::default(),
+                                ty,
+                                expr,
+                                define_opaque,
+                            }))
+                        }
+                        _ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"),
+                    },
+                };
+                Some(P(Item { attrs, id, span, vis, kind, tokens }))
+            },
+        ))
+    }
+
+    /// Parses a `type` alias with the following grammar:
+    /// ```ebnf
+    /// TypeAlias = "type" Ident Generics (":" GenericBounds)? WhereClause ("=" Ty)? WhereClause ";" ;
+    /// ```
+    /// The `"type"` has already been eaten.
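+    ///
+    /// For illustration, some forms covered by this grammar (hypothetical names):
+    /// ```ignore (illustrative)
+    /// type Alias<T> = Vec<T>;
+    /// type Assoc: Clone;                  // bounds, no body
+    /// type Assoc = u8 where Self: Sized;  // where-clause after the body
+    /// ```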
+    fn parse_type_alias(&mut self, defaultness: Defaultness) -> PResult<'a, ItemKind> {
+        let ident = self.parse_ident()?;
+        let mut generics = self.parse_generics()?;
+
+        // Parse optional colon and param bounds.
+        let bounds = if self.eat(exp!(Colon)) { self.parse_generic_bounds()? } else { Vec::new() };
+        let before_where_clause = self.parse_where_clause()?;
+
+        let ty = if self.eat(exp!(Eq)) { Some(self.parse_ty()?) } else { None };
+
+        let after_where_clause = self.parse_where_clause()?;
+
+        let where_clauses = TyAliasWhereClauses {
+            before: TyAliasWhereClause {
+                has_where_token: before_where_clause.has_where_token,
+                span: before_where_clause.span,
+            },
+            after: TyAliasWhereClause {
+                has_where_token: after_where_clause.has_where_token,
+                span: after_where_clause.span,
+            },
+            split: before_where_clause.predicates.len(),
+        };
+        let mut predicates = before_where_clause.predicates;
+        predicates.extend(after_where_clause.predicates);
+        let where_clause = WhereClause {
+            has_where_token: before_where_clause.has_where_token
+                || after_where_clause.has_where_token,
+            predicates,
+            span: DUMMY_SP,
+        };
+        generics.where_clause = where_clause;
+
+        self.expect_semi()?;
+
+        Ok(ItemKind::TyAlias(Box::new(TyAlias {
+            defaultness,
+            ident,
+            generics,
+            where_clauses,
+            bounds,
+            ty,
+        })))
+    }
+
+    /// Parses a `UseTree`.
+    ///
+    /// ```text
+    /// USE_TREE = [`::`] `*` |
+    ///            [`::`] `{` USE_TREE_LIST `}` |
+    ///            PATH `::` `*` |
+    ///            PATH `::` `{` USE_TREE_LIST `}` |
+    ///            PATH [`as` IDENT]
+    /// ```
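+    ///
+    /// For illustration, inputs covered by this grammar (hypothetical paths):
+    /// ```ignore (illustrative)
+    /// use std::collections::HashMap;
+    /// use std::io::{self, Read as R};
+    /// use crate::prelude::*;
+    /// ```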
+    fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
+        let lo = self.token.span;
+
+        let mut prefix =
+            ast::Path { segments: ThinVec::new(), span: lo.shrink_to_lo(), tokens: None };
+        let kind =
+            if self.check(exp!(OpenBrace)) || self.check(exp!(Star)) || self.is_import_coupler() {
+                // `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
+                let mod_sep_ctxt = self.token.span.ctxt();
+                if self.eat_path_sep() {
+                    prefix
+                        .segments
+                        .push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
+                }
+
+                self.parse_use_tree_glob_or_nested()?
+            } else {
+                // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
+                prefix = self.parse_path(PathStyle::Mod)?;
+
+                if self.eat_path_sep() {
+                    self.parse_use_tree_glob_or_nested()?
+                } else {
+                    // Recover from using a colon as path separator.
+                    while self.eat_noexpect(&token::Colon) {
+                        self.dcx()
+                            .emit_err(errors::SingleColonImportPath { span: self.prev_token.span });
+
+                        // We parse the rest of the path and append it to the original prefix.
+                        self.parse_path_segments(&mut prefix.segments, PathStyle::Mod, None)?;
+                        prefix.span = lo.to(self.prev_token.span);
+                    }
+
+                    UseTreeKind::Simple(self.parse_rename()?)
+                }
+            };
+
+        Ok(UseTree { prefix, kind, span: lo.to(self.prev_token.span) })
+    }
+
+    /// Parses `*` or `{...}`.
+    fn parse_use_tree_glob_or_nested(&mut self) -> PResult<'a, UseTreeKind> {
+        Ok(if self.eat(exp!(Star)) {
+            UseTreeKind::Glob
+        } else {
+            let lo = self.token.span;
+            UseTreeKind::Nested {
+                items: self.parse_use_tree_list()?,
+                span: lo.to(self.prev_token.span),
+            }
+        })
+    }
+
+    /// Parses a `UseTreeKind::Nested(list)`.
+    ///
+    /// ```text
+    /// USE_TREE_LIST = ∅ | (USE_TREE `,`)* USE_TREE [`,`]
+    /// ```
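+    ///
+    /// e.g. the braced part of `use foo::{a, b as c, d::*};` (illustrative).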
+    fn parse_use_tree_list(&mut self) -> PResult<'a, ThinVec<(UseTree, ast::NodeId)>> {
+        self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), |p| {
+            p.recover_vcs_conflict_marker();
+            Ok((p.parse_use_tree()?, DUMMY_NODE_ID))
+        })
+        .map(|(r, _)| r)
+    }
+
+    fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
+        if self.eat_keyword(exp!(As)) {
+            self.parse_ident_or_underscore().map(Some)
+        } else {
+            Ok(None)
+        }
+    }
+
+    fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> {
+        match self.token.ident() {
+            Some((ident @ Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) => {
+                self.bump();
+                Ok(ident)
+            }
+            _ => self.parse_ident(),
+        }
+    }
+
+    /// Parses `extern crate` links.
+    ///
+    /// # Examples
+    ///
+    /// ```ignore (illustrative)
+    /// extern crate foo;
+    /// extern crate bar as foo;
+    /// ```
+    fn parse_item_extern_crate(&mut self) -> PResult<'a, ItemKind> {
+        // Accept `extern crate name-like-this` for better diagnostics
+        let orig_ident = self.parse_crate_name_with_dashes()?;
+        let (orig_name, item_ident) = if let Some(rename) = self.parse_rename()? {
+            (Some(orig_ident.name), rename)
+        } else {
+            (None, orig_ident)
+        };
+        self.expect_semi()?;
+        Ok(ItemKind::ExternCrate(orig_name, item_ident))
+    }
+
+    fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, Ident> {
+        let ident = if self.token.is_keyword(kw::SelfLower) {
+            self.parse_path_segment_ident()
+        } else {
+            self.parse_ident()
+        }?;
+
+        let dash = exp!(Minus);
+        if self.token != *dash.tok {
+            return Ok(ident);
+        }
+
+        // Accept `extern crate name-like-this` for better diagnostics.
+        let mut dashes = vec![];
+        let mut idents = vec![];
+        while self.eat(dash) {
+            dashes.push(self.prev_token.span);
+            idents.push(self.parse_ident()?);
+        }
+
+        let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
+        let mut fixed_name = ident.name.to_string();
+        for part in idents {
+            write!(fixed_name, "_{}", part.name).unwrap();
+        }
+
+        self.dcx().emit_err(errors::ExternCrateNameWithDashes {
+            span: fixed_name_sp,
+            sugg: errors::ExternCrateNameWithDashesSugg { dashes },
+        });
+
+        Ok(Ident::from_str_and_span(&fixed_name, fixed_name_sp))
+    }
+
+    /// Parses an `extern` block defining a foreign module, with an optional ABI.
+    ///
+    /// `extern` is expected to have been consumed before calling this method.
+    ///
+    /// # Examples
+    ///
+    /// ```ignore (only-for-syntax-highlight)
+    /// extern "C" {}
+    /// extern {}
+    /// ```
+    fn parse_item_foreign_mod(
+        &mut self,
+        attrs: &mut AttrVec,
+        mut safety: Safety,
+    ) -> PResult<'a, ItemKind> {
+        let extern_span = self.prev_token_uninterpolated_span();
+        let abi = self.parse_abi(); // ABI?
+        // FIXME: This recovery should be tested better.
+        if safety == Safety::Default
+            && self.token.is_keyword(kw::Unsafe)
+            && self.look_ahead(1, |t| *t == token::OpenBrace)
+        {
+            self.expect(exp!(OpenBrace)).unwrap_err().emit();
+            safety = Safety::Unsafe(self.token.span);
+            let _ = self.eat_keyword(exp!(Unsafe));
+        }
+        Ok(ItemKind::ForeignMod(ast::ForeignMod {
+            extern_span,
+            safety,
+            abi,
+            items: self.parse_item_list(attrs, |p| p.parse_foreign_item(ForceCollect::No))?,
+        }))
+    }
+
+    /// Parses a foreign item (one in an `extern { ... }` block).
+    pub fn parse_foreign_item(
+        &mut self,
+        force_collect: ForceCollect,
+    ) -> PResult<'a, Option<Option<P<ForeignItem>>>> {
+        let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: false };
+        Ok(self.parse_item_(fn_parse_mode, force_collect)?.map(
+            |Item { attrs, id, span, vis, kind, tokens }| {
+                let kind = match ForeignItemKind::try_from(kind) {
+                    Ok(kind) => kind,
+                    Err(kind) => match kind {
+                        ItemKind::Const(box ConstItem { ident, ty, expr, .. }) => {
+                            let const_span = Some(span.with_hi(ident.span.lo()))
+                                .filter(|span| span.can_be_used_for_suggestions());
+                            self.dcx().emit_err(errors::ExternItemCannotBeConst {
+                                ident_span: ident.span,
+                                const_span,
+                            });
+                            ForeignItemKind::Static(Box::new(StaticItem {
+                                ident,
+                                ty,
+                                mutability: Mutability::Not,
+                                expr,
+                                safety: Safety::Default,
+                                define_opaque: None,
+                            }))
+                        }
+                        _ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"),
+                    },
+                };
+                Some(P(Item { attrs, id, span, vis, kind, tokens }))
+            },
+        ))
+    }
+
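+    /// Emits an error for an item kind that cannot appear in the given context
+    /// (`ctx`, e.g. "`trait`s or `impl`s") and returns `None` so parsing can continue.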
+    fn error_bad_item_kind<T>(&self, span: Span, kind: &ItemKind, ctx: &'static str) -> Option<T> {
+        // FIXME(#100717): needs variant for each `ItemKind` (instead of using `ItemKind::descr()`)
+        let span = self.psess.source_map().guess_head_span(span);
+        let descr = kind.descr();
+        let help = match kind {
+            ItemKind::DelegationMac(deleg) if deleg.suffixes.is_none() => false,
+            _ => true,
+        };
+        self.dcx().emit_err(errors::BadItemKind { span, descr, ctx, help });
+        None
+    }
+
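+    /// Is this the start of a closure introduced by the `use` keyword, e.g. `use || ...`
+    /// (or, erroneously but still parsed as a closure, `use move || ...` / `use async || ...`)?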
+    fn is_use_closure(&self) -> bool {
+        if self.token.is_keyword(kw::Use) {
+            // Check if this could be a closure.
+            self.look_ahead(1, |token| {
+                // `move` or `async` here would be an error, but we still parse this as a closure
+                let dist =
+                    if token.is_keyword(kw::Move) || token.is_keyword(kw::Async) { 2 } else { 1 };
+
+                self.look_ahead(dist, |token| matches!(token.kind, token::Or | token::OrOr))
+            })
+        } else {
+            false
+        }
+    }
+
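+    /// Is this the start of an `unsafe extern` block, e.g. `unsafe extern "C" { ... }`
+    /// or `unsafe extern { ... }`?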
+    fn is_unsafe_foreign_mod(&self) -> bool {
+        // Look for `unsafe`.
+        if !self.token.is_keyword(kw::Unsafe) {
+            return false;
+        }
+        // Look for `extern`.
+        if !self.is_keyword_ahead(1, &[kw::Extern]) {
+            return false;
+        }
+
+        // Look for the optional ABI string literal.
+        let n = if self.look_ahead(2, |t| t.can_begin_string_literal()) { 3 } else { 2 };
+
+        // Look for the `{`. Use `tree_look_ahead` because the ABI (if present)
+        // might be a metavariable, i.e. an invisible-delimited sequence, and
+        // `tree_look_ahead` will consider that a single element when looking
+        // ahead.
+        self.tree_look_ahead(n, |t| matches!(t, TokenTree::Delimited(_, _, Delimiter::Brace, _)))
+            == Some(true)
+    }
+
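+    /// Is this the start of a `static` item (possibly preceded by `unsafe` or `safe`),
+    /// as opposed to a `static` closure such as `static || ...`?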
+    fn is_static_global(&mut self) -> bool {
+        if self.check_keyword(exp!(Static)) {
+            // Check if this could be a closure.
+            !self.look_ahead(1, |token| {
+                if token.is_keyword(kw::Move) || token.is_keyword(kw::Use) {
+                    return true;
+                }
+                matches!(token.kind, token::Or | token::OrOr)
+            })
+        } else {
+            // `$qual static`
+            (self.check_keyword(exp!(Unsafe)) || self.check_keyword(exp!(Safe)))
+                && self.look_ahead(1, |t| t.is_keyword(kw::Static))
+        }
+    }
+
+    /// Recover on `const mut` or `const let` with `const` already eaten.
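+    /// (e.g., illustratively, `const mut X: u8 = 0;` or `const let x = 0;`).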
+    fn recover_const_mut(&mut self, const_span: Span) {
+        if self.eat_keyword(exp!(Mut)) {
+            let span = self.prev_token.span;
+            self.dcx()
+                .emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span });
+        } else if self.eat_keyword(exp!(Let)) {
+            let span = self.prev_token.span;
+            self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) });
+        }
+    }
+
+    /// Recover on `const impl` with `const` already eaten.
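+    /// e.g. (illustrative) `const impl Trait for Type {}`, recovered by suggesting
+    /// `impl const Trait for Type {}`.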
+    fn recover_const_impl(
+        &mut self,
+        const_span: Span,
+        attrs: &mut AttrVec,
+        defaultness: Defaultness,
+    ) -> PResult<'a, ItemKind> {
+        let impl_span = self.token.span;
+        let err = self.expected_ident_found_err();
+
+        // Only try to recover if this is implementing a trait for a type
+        let mut item_kind = match self.parse_item_impl(attrs, defaultness) {
+            Ok(item_kind) => item_kind,
+            Err(recovery_error) => {
+                // Recovery failed, raise the "expected identifier" error
+                recovery_error.cancel();
+                return Err(err);
+            }
+        };
+
+        match &mut item_kind {
+            ItemKind::Impl(box Impl { of_trait: Some(trai), constness, .. }) => {
+                *constness = Const::Yes(const_span);
+
+                let before_trait = trai.path.span.shrink_to_lo();
+                let const_up_to_impl = const_span.with_hi(impl_span.lo());
+                err.with_multipart_suggestion(
+                    "you might have meant to write a const trait impl",
+                    vec![(const_up_to_impl, "".to_owned()), (before_trait, "const ".to_owned())],
+                    Applicability::MaybeIncorrect,
+                )
+                .emit();
+            }
+            ItemKind::Impl { .. } => return Err(err),
+            _ => unreachable!(),
+        }
+
+        Ok(item_kind)
+    }
+
+    /// Parse a static item with the prefix `"static" "mut"?` already parsed and stored in
+    /// `mutability`.
+    ///
+    /// ```ebnf
+    /// Static = "static" "mut"? $ident ":" $ty ("=" $expr)? ";" ;
+    /// ```
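+    ///
+    /// For illustration (hypothetical items):
+    /// ```ignore (illustrative)
+    /// static FOO: u32 = 42;
+    /// static mut BAR: u32 = 0;
+    /// ```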
+    fn parse_static_item(
+        &mut self,
+        safety: Safety,
+        mutability: Mutability,
+    ) -> PResult<'a, ItemKind> {
+        let ident = self.parse_ident()?;
+
+        if self.token == TokenKind::Lt && self.may_recover() {
+            let generics = self.parse_generics()?;
+            self.dcx().emit_err(errors::StaticWithGenerics { span: generics.span });
+        }
+
+        // Parse the type of a static item. That is, the `":" $ty` fragment.
+        // FIXME: This could maybe benefit from `.may_recover()`?
+        let ty = match (self.eat(exp!(Colon)), self.check(exp!(Eq)) | self.check(exp!(Semi))) {
+            (true, false) => self.parse_ty()?,
+            // If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing
+            // type.
+            (colon, _) => self.recover_missing_global_item_type(colon, Some(mutability)),
+        };
+
+        let expr = if self.eat(exp!(Eq)) { Some(self.parse_expr()?) } else { None };
+
+        self.expect_semi()?;
+
+        let item = StaticItem { ident, ty, safety, mutability, expr, define_opaque: None };
+        Ok(ItemKind::Static(Box::new(item)))
+    }
+
+    /// Parse a constant item with the prefix `"const"` already parsed.
+    ///
+    /// ```ebnf
+    /// Const = "const" ($ident | "_") Generics ":" $ty ("=" $expr)? WhereClause ";" ;
+    /// ```
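+    ///
+    /// For illustration (hypothetical items):
+    /// ```ignore (illustrative)
+    /// const LIMIT: usize = 16;
+    /// const _: () = ();
+    /// ```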
+    fn parse_const_item(&mut self) -> PResult<'a, (Ident, Generics, P<Ty>, Option<P<ast::Expr>>)> {
+        let ident = self.parse_ident_or_underscore()?;
+
+        let mut generics = self.parse_generics()?;
+
+        // Check the span for emptiness instead of the list of parameters in order to correctly
+        // recognize and subsequently flag empty parameter lists (`<>`) as unstable.
+        if !generics.span.is_empty() {
+            self.psess.gated_spans.gate(sym::generic_const_items, generics.span);
+        }
+
+        // Parse the type of a constant item. That is, the `":" $ty` fragment.
+        // FIXME: This could maybe benefit from `.may_recover()`?
+        let ty = match (
+            self.eat(exp!(Colon)),
+            self.check(exp!(Eq)) | self.check(exp!(Semi)) | self.check_keyword(exp!(Where)),
+        ) {
+            (true, false) => self.parse_ty()?,
+            // If there wasn't a `:` or the colon was followed by a `=`, `;` or `where`, recover a missing type.
+            (colon, _) => self.recover_missing_global_item_type(colon, None),
+        };
+
+        // Proactively parse a where-clause to be able to provide a good error message in case we
+        // encounter the item body following it.
+        let before_where_clause =
+            if self.may_recover() { self.parse_where_clause()? } else { WhereClause::default() };
+
+        let expr = if self.eat(exp!(Eq)) { Some(self.parse_expr()?) } else { None };
+
+        let after_where_clause = self.parse_where_clause()?;
+
+        // Provide a nice error message if the user placed a where-clause before the item body.
+        // Users may be tempted to write such code if they are still used to the deprecated
+        // where-clause location on type aliases and associated types. See also #89122.
+        if before_where_clause.has_where_token
+            && let Some(expr) = &expr
+        {
+            self.dcx().emit_err(errors::WhereClauseBeforeConstBody {
+                span: before_where_clause.span,
+                name: ident.span,
+                body: expr.span,
+                sugg: if !after_where_clause.has_where_token {
+                    self.psess.source_map().span_to_snippet(expr.span).ok().map(|body| {
+                        errors::WhereClauseBeforeConstBodySugg {
+                            left: before_where_clause.span.shrink_to_lo(),
+                            snippet: body,
+                            right: before_where_clause.span.shrink_to_hi().to(expr.span),
+                        }
+                    })
+                } else {
+                    // FIXME(generic_const_items): Provide a structured suggestion to merge the first
+                    // where-clause into the second one.
+                    None
+                },
+            });
+        }
+
+        // Merge the predicates of both where-clauses since either one can be relevant.
+        // If we didn't parse a body (which is valid for associated consts in traits) and we were
+        // allowed to recover, `before_where_clause` contains the predicates; otherwise they are
+        // in `after_where_clause`. Further, both of them might contain predicates iff two
+        // where-clauses were provided, which is syntactically ill-formed, but we want to recover
+        // from it and treat them as one large where-clause.
+        let mut predicates = before_where_clause.predicates;
+        predicates.extend(after_where_clause.predicates);
+        let where_clause = WhereClause {
+            has_where_token: before_where_clause.has_where_token
+                || after_where_clause.has_where_token,
+            predicates,
+            span: if after_where_clause.has_where_token {
+                after_where_clause.span
+            } else {
+                before_where_clause.span
+            },
+        };
+
+        if where_clause.has_where_token {
+            self.psess.gated_spans.gate(sym::generic_const_items, where_clause.span);
+        }
+
+        generics.where_clause = where_clause;
+
+        self.expect_semi()?;
+
+        Ok((ident, generics, ty, expr))
+    }
+
+    /// We were supposed to parse `":" $ty` but the `:` or the type itself was missing,
+    /// so we recover by treating the type as missing.
+    fn recover_missing_global_item_type(
+        &mut self,
+        colon_present: bool,
+        m: Option<Mutability>,
+    ) -> P<Ty> {
+        // Construct the error and stash it away with the hope
+        // that typeck will later enrich the error with a type.
+        let kind = match m {
+            Some(Mutability::Mut) => "static mut",
+            Some(Mutability::Not) => "static",
+            None => "const",
+        };
+
+        let colon = match colon_present {
+            true => "",
+            false => ":",
+        };
+
+        let span = self.prev_token.span.shrink_to_hi();
+        let err = self.dcx().create_err(errors::MissingConstType { span, colon, kind });
+        err.stash(span, StashKey::ItemNoType);
+
+        // The user intended that the type be inferred,
+        // so treat this as if the user wrote e.g. `const A: _ = expr;`.
+        P(Ty { kind: TyKind::Infer, span, id: ast::DUMMY_NODE_ID, tokens: None })
+    }
+
+    /// Parses an enum declaration.
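+    ///
+    /// For illustration (hypothetical enum):
+    /// ```ignore (illustrative)
+    /// enum Foo {
+    ///     Bar,
+    ///     Baz(u8),
+    ///     Qux { field: u8 },
+    /// }
+    /// ```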
+    fn parse_item_enum(&mut self) -> PResult<'a, ItemKind> {
+        if self.token.is_keyword(kw::Struct) {
+            let span = self.prev_token.span.to(self.token.span);
+            let err = errors::EnumStructMutuallyExclusive { span };
+            if self.look_ahead(1, |t| t.is_ident()) {
+                self.bump();
+                self.dcx().emit_err(err);
+            } else {
+                return Err(self.dcx().create_err(err));
+            }
+        }
+
+        let prev_span = self.prev_token.span;
+        let ident = self.parse_ident()?;
+        let mut generics = self.parse_generics()?;
+        generics.where_clause = self.parse_where_clause()?;
+
+        // Possibly recover `enum Foo;` instead of `enum Foo {}`
+        let (variants, _) = if self.token == TokenKind::Semi {
+            self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
+            self.bump();
+            (thin_vec![], Trailing::No)
+        } else {
+            self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), |p| {
+                p.parse_enum_variant(ident.span)
+            })
+            .map_err(|mut err| {
+                err.span_label(ident.span, "while parsing this enum");
+                // Try to recover `enum Foo { ident : Ty }`.
+                if self.prev_token.is_non_reserved_ident() && self.token == token::Colon {
+                    let snapshot = self.create_snapshot_for_diagnostic();
+                    self.bump();
+                    match self.parse_ty() {
+                        Ok(_) => {
+                            err.span_suggestion_verbose(
+                                prev_span,
+                                "perhaps you meant to use `struct` here",
+                                "struct",
+                                Applicability::MaybeIncorrect,
+                            );
+                        }
+                        Err(e) => {
+                            e.cancel();
+                        }
+                    }
+                    self.restore_snapshot(snapshot);
+                }
+                self.eat_to_tokens(&[exp!(CloseBrace)]);
+                self.bump(); // }
+                err
+            })?
+        };
+
+        let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
+        Ok(ItemKind::Enum(ident, generics, enum_definition))
+    }
+
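+    /// Parses a single enum variant: unit, tuple, or struct-like, with an optional
+    /// explicit discriminant (`= <expr>`).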
+    fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
+        self.recover_vcs_conflict_marker();
+        let variant_attrs = self.parse_outer_attributes()?;
+        self.recover_vcs_conflict_marker();
+        let help = "enum variants can be `Variant`, `Variant = <integer>`, \
+                    `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
+        self.collect_tokens(None, variant_attrs, ForceCollect::No, |this, variant_attrs| {
+            let vlo = this.token.span;
+
+            let vis = this.parse_visibility(FollowedByType::No)?;
+            if !this.recover_nested_adt_item(kw::Enum)? {
+                return Ok((None, Trailing::No, UsePreAttrPos::No));
+            }
+            let ident = this.parse_field_ident("enum", vlo)?;
+
+            if this.token == token::Bang {
+                if let Err(err) = this.unexpected() {
+                    err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
+                }
+
+                this.bump();
+                this.parse_delim_args()?;
+
+                return Ok((None, Trailing::from(this.token == token::Comma), UsePreAttrPos::No));
+            }
+
+            let struct_def = if this.check(exp!(OpenBrace)) {
+                // Parse a struct variant.
+                let (fields, recovered) =
+                    match this.parse_record_struct_body("struct", ident.span, false) {
+                        Ok((fields, recovered)) => (fields, recovered),
+                        Err(mut err) => {
+                            if this.token == token::Colon {
+                                // We handle `enum` to `struct` suggestion in the caller.
+                                return Err(err);
+                            }
+                            this.eat_to_tokens(&[exp!(CloseBrace)]);
+                            this.bump(); // }
+                            err.span_label(span, "while parsing this enum");
+                            err.help(help);
+                            let guar = err.emit();
+                            (thin_vec![], Recovered::Yes(guar))
+                        }
+                    };
+                VariantData::Struct { fields, recovered }
+            } else if this.check(exp!(OpenParen)) {
+                let body = match this.parse_tuple_struct_body() {
+                    Ok(body) => body,
+                    Err(mut err) => {
+                        if this.token == token::Colon {
+                            // We handle `enum` to `struct` suggestion in the caller.
+                            return Err(err);
+                        }
+                        this.eat_to_tokens(&[exp!(CloseParen)]);
+                        this.bump(); // )
+                        err.span_label(span, "while parsing this enum");
+                        err.help(help);
+                        err.emit();
+                        thin_vec![]
+                    }
+                };
+                VariantData::Tuple(body, DUMMY_NODE_ID)
+            } else {
+                VariantData::Unit(DUMMY_NODE_ID)
+            };
+
+            let disr_expr =
+                if this.eat(exp!(Eq)) { Some(this.parse_expr_anon_const()?) } else { None };
+
+            let vr = ast::Variant {
+                ident,
+                vis,
+                id: DUMMY_NODE_ID,
+                attrs: variant_attrs,
+                data: struct_def,
+                disr_expr,
+                span: vlo.to(this.prev_token.span),
+                is_placeholder: false,
+            };
+
+            Ok((Some(vr), Trailing::from(this.token == token::Comma), UsePreAttrPos::No))
+        })
+        .map_err(|mut err| {
+            err.help(help);
+            err
+        })
+    }
+
+    /// Parses `struct Foo { ... }`.
+    fn parse_item_struct(&mut self) -> PResult<'a, ItemKind> {
+        let ident = self.parse_ident()?;
+
+        let mut generics = self.parse_generics()?;
+
+        // There is a special case worth noting here, as reported in issue #17904.
+        // If we are parsing a tuple struct, the where clause follows the field list. Like so:
+        //
+        // struct Foo<T>(T) where T: Copy;
+        //
+        // If we are parsing a normal record-style struct, the where clause comes
+        // before the body, after the generics. So if we look ahead and see a brace
+        // or a where-clause, we begin parsing a record-style struct.
+        //
+        // Otherwise, if we look ahead and see a paren, we parse a tuple-style struct.
+
+        let vdata = if self.token.is_keyword(kw::Where) {
+            let tuple_struct_body;
+            (generics.where_clause, tuple_struct_body) =
+                self.parse_struct_where_clause(ident, generics.span)?;
+
+            if let Some(body) = tuple_struct_body {
+                // If we see a misplaced tuple struct body: `struct Foo<T> where T: Copy, (T);`
+                let body = VariantData::Tuple(body, DUMMY_NODE_ID);
+                self.expect_semi()?;
+                body
+            } else if self.eat(exp!(Semi)) {
+                // If we see a `struct Foo<T> where T: Copy;` style decl.
+                VariantData::Unit(DUMMY_NODE_ID)
+            } else {
+                // If we see: `struct Foo<T> where T: Copy { ... }`
+                let (fields, recovered) = self.parse_record_struct_body(
+                    "struct",
+                    ident.span,
+                    generics.where_clause.has_where_token,
+                )?;
+                VariantData::Struct { fields, recovered }
+            }
+        // No `where` so: `struct Foo<T>;`
+        } else if self.eat(exp!(Semi)) {
+            VariantData::Unit(DUMMY_NODE_ID)
+        // Record-style struct definition
+        } else if self.token == token::OpenBrace {
+            let (fields, recovered) = self.parse_record_struct_body(
+                "struct",
+                ident.span,
+                generics.where_clause.has_where_token,
+            )?;
+            VariantData::Struct { fields, recovered }
+        // Tuple-style struct definition with optional where-clause.
+        } else if self.token == token::OpenParen {
+            let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
+            generics.where_clause = self.parse_where_clause()?;
+            self.expect_semi()?;
+            body
+        } else {
+            let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token);
+            return Err(self.dcx().create_err(err));
+        };
+
+        Ok(ItemKind::Struct(ident, generics, vdata))
+    }
+
+    /// Parses `union Foo { ... }`.
+    fn parse_item_union(&mut self) -> PResult<'a, ItemKind> {
+        let ident = self.parse_ident()?;
+
+        let mut generics = self.parse_generics()?;
+
+        let vdata = if self.token.is_keyword(kw::Where) {
+            generics.where_clause = self.parse_where_clause()?;
+            let (fields, recovered) = self.parse_record_struct_body(
+                "union",
+                ident.span,
+                generics.where_clause.has_where_token,
+            )?;
+            VariantData::Struct { fields, recovered }
+        } else if self.token == token::OpenBrace {
+            let (fields, recovered) = self.parse_record_struct_body(
+                "union",
+                ident.span,
+                generics.where_clause.has_where_token,
+            )?;
+            VariantData::Struct { fields, recovered }
+        } else {
+            let token_str = super::token_descr(&self.token);
+            let msg = format!("expected `where` or `{{` after union name, found {token_str}");
+            let mut err = self.dcx().struct_span_err(self.token.span, msg);
+            err.span_label(self.token.span, "expected `where` or `{` after union name");
+            return Err(err);
+        };
+
+        Ok(ItemKind::Union(ident, generics, vdata))
+    }
+
+    /// This function parses the fields of record structs:
+    ///
+    ///   - `struct S { ... }`
+    ///   - `enum E { Variant { ... } }`
+    pub(crate) fn parse_record_struct_body(
+        &mut self,
+        adt_ty: &str,
+        ident_span: Span,
+        parsed_where: bool,
+    ) -> PResult<'a, (ThinVec<FieldDef>, Recovered)> {
+        let mut fields = ThinVec::new();
+        let mut recovered = Recovered::No;
+        if self.eat(exp!(OpenBrace)) {
+            while self.token != token::CloseBrace {
+                match self.parse_field_def(adt_ty, ident_span) {
+                    Ok(field) => {
+                        fields.push(field);
+                    }
+                    Err(mut err) => {
+                        self.consume_block(
+                            exp!(OpenBrace),
+                            exp!(CloseBrace),
+                            ConsumeClosingDelim::No,
+                        );
+                        err.span_label(ident_span, format!("while parsing this {adt_ty}"));
+                        let guar = err.emit();
+                        recovered = Recovered::Yes(guar);
+                        break;
+                    }
+                }
+            }
+            self.expect(exp!(CloseBrace))?;
+        } else {
+            let token_str = super::token_descr(&self.token);
+            let where_str = if parsed_where { "" } else { "`where`, or " };
+            let msg = format!("expected {where_str}`{{` after struct name, found {token_str}");
+            let mut err = self.dcx().struct_span_err(self.token.span, msg);
+            err.span_label(self.token.span, format!("expected {where_str}`{{` after struct name",));
+            return Err(err);
+        }
+
+        Ok((fields, recovered))
+    }
+
+    fn parse_unsafe_field(&mut self) -> Safety {
+        // Not using `parse_safety`, as that also accepts `safe`.
+        if self.eat_keyword(exp!(Unsafe)) {
+            let span = self.prev_token.span;
+            self.psess.gated_spans.gate(sym::unsafe_fields, span);
+            Safety::Unsafe(span)
+        } else {
+            Safety::Default
+        }
+    }
+
+    pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, ThinVec<FieldDef>> {
+        // This is the case where we find `struct Foo<T>(T) where T: Copy;`
+        // Unit-like structs are handled in the `parse_item_struct` function.
+        self.parse_paren_comma_seq(|p| {
+            let attrs = p.parse_outer_attributes()?;
+            p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| {
+                let mut snapshot = None;
+                if p.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) {
+                    // Account for `<<<<<<<` diff markers. We can't proactively error here because
+                    // that can be a valid type start, so we snapshot and reparse only if we've
+                    // encountered another parse error.
+                    snapshot = Some(p.create_snapshot_for_diagnostic());
+                }
+                let lo = p.token.span;
+                let vis = match p.parse_visibility(FollowedByType::Yes) {
+                    Ok(vis) => vis,
+                    Err(err) => {
+                        if let Some(ref mut snapshot) = snapshot {
+                            snapshot.recover_vcs_conflict_marker();
+                        }
+                        return Err(err);
+                    }
+                };
+                // Unsafe fields are not supported in tuple structs, as doing so would result in a
+                // parsing ambiguity for `struct X(unsafe fn())`.
+                let ty = match p.parse_ty() {
+                    Ok(ty) => ty,
+                    Err(err) => {
+                        if let Some(ref mut snapshot) = snapshot {
+                            snapshot.recover_vcs_conflict_marker();
+                        }
+                        return Err(err);
+                    }
+                };
+                let mut default = None;
+                if p.token == token::Eq {
+                    let mut snapshot = p.create_snapshot_for_diagnostic();
+                    snapshot.bump();
+                    match snapshot.parse_expr_anon_const() {
+                        Ok(const_expr) => {
+                            let sp = ty.span.shrink_to_hi().to(const_expr.value.span);
+                            p.psess.gated_spans.gate(sym::default_field_values, sp);
+                            p.restore_snapshot(snapshot);
+                            default = Some(const_expr);
+                        }
+                        Err(err) => {
+                            err.cancel();
+                        }
+                    }
+                }
+
+                Ok((
+                    FieldDef {
+                        span: lo.to(ty.span),
+                        vis,
+                        safety: Safety::Default,
+                        ident: None,
+                        id: DUMMY_NODE_ID,
+                        ty,
+                        default,
+                        attrs,
+                        is_placeholder: false,
+                    },
+                    Trailing::from(p.token == token::Comma),
+                    UsePreAttrPos::No,
+                ))
+            })
+        })
+        .map(|(r, _)| r)
+    }
+
+    /// Parses an element of a struct declaration.
+    fn parse_field_def(&mut self, adt_ty: &str, ident_span: Span) -> PResult<'a, FieldDef> {
+        self.recover_vcs_conflict_marker();
+        let attrs = self.parse_outer_attributes()?;
+        self.recover_vcs_conflict_marker();
+        self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+            let lo = this.token.span;
+            let vis = this.parse_visibility(FollowedByType::No)?;
+            let safety = this.parse_unsafe_field();
+            this.parse_single_struct_field(adt_ty, lo, vis, safety, attrs, ident_span)
+                .map(|field| (field, Trailing::No, UsePreAttrPos::No))
+        })
+    }
+
+    /// Parses a structure field declaration.
+    fn parse_single_struct_field(
+        &mut self,
+        adt_ty: &str,
+        lo: Span,
+        vis: Visibility,
+        safety: Safety,
+        attrs: AttrVec,
+        ident_span: Span,
+    ) -> PResult<'a, FieldDef> {
+        let a_var = self.parse_name_and_ty(adt_ty, lo, vis, safety, attrs)?;
+        match self.token.kind {
+            token::Comma => {
+                self.bump();
+            }
+            token::Semi => {
+                self.bump();
+                let sp = self.prev_token.span;
+                let mut err =
+                    self.dcx().struct_span_err(sp, format!("{adt_ty} fields are separated by `,`"));
+                err.span_suggestion_short(
+                    sp,
+                    "replace `;` with `,`",
+                    ",",
+                    Applicability::MachineApplicable,
+                );
+                err.span_label(ident_span, format!("while parsing this {adt_ty}"));
+                err.emit();
+            }
+            token::CloseBrace => {}
+            token::DocComment(..) => {
+                let previous_span = self.prev_token.span;
+                let mut err = errors::DocCommentDoesNotDocumentAnything {
+                    span: self.token.span,
+                    missing_comma: None,
+                };
+                self.bump(); // consume the doc comment
+                if self.eat(exp!(Comma)) || self.token == token::CloseBrace {
+                    self.dcx().emit_err(err);
+                } else {
+                    let sp = previous_span.shrink_to_hi();
+                    err.missing_comma = Some(sp);
+                    return Err(self.dcx().create_err(err));
+                }
+            }
+            _ => {
+                let sp = self.prev_token.span.shrink_to_hi();
+                let msg =
+                    format!("expected `,`, or `}}`, found {}", super::token_descr(&self.token));
+
+                // Try to recover extra trailing angle brackets
+                if let TyKind::Path(_, Path { segments, .. }) = &a_var.ty.kind
+                    && let Some(last_segment) = segments.last()
+                {
+                    let guar = self.check_trailing_angle_brackets(
+                        last_segment,
+                        &[exp!(Comma), exp!(CloseBrace)],
+                    );
+                    if let Some(_guar) = guar {
+                        // Handle a case like `Vec<u8>>,` where we can continue parsing fields
+                        // after the comma
+                        let _ = self.eat(exp!(Comma));
+
+                        // `check_trailing_angle_brackets` already emitted a nicer error, as
+                        // proven by the presence of `_guar`. We can continue parsing.
+                        return Ok(a_var);
+                    }
+                }
+
+                let mut err = self.dcx().struct_span_err(sp, msg);
+
+                if self.token.is_ident()
+                    || (self.token == TokenKind::Pound
+                        && (self.look_ahead(1, |t| t == &token::OpenBracket)))
+                {
+                    // This is likely another field; `TokenKind::Pound` is used for the `#[..]`
+                    // attribute of the next field. Emit the diagnostic and continue parsing.
+                    err.span_suggestion(
+                        sp,
+                        "try adding a comma",
+                        ",",
+                        Applicability::MachineApplicable,
+                    );
+                    err.emit();
+                } else {
+                    return Err(err);
+                }
+            }
+        }
+        Ok(a_var)
+    }
+
+    fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> {
+        if let Err(err) = self.expect(exp!(Colon)) {
+            let sm = self.psess.source_map();
+            let eq_typo = self.token == token::Eq && self.look_ahead(1, |t| t.is_path_start());
+            let semi_typo = self.token == token::Semi
+                && self.look_ahead(1, |t| {
+                    t.is_path_start()
+                    // We check that we are in a situation like `foo; bar` to avoid bad suggestions
+                    // when there's no type and `;` was used instead of a comma.
+                    && match (sm.lookup_line(self.token.span.hi()), sm.lookup_line(t.span.lo())) {
+                        (Ok(l), Ok(r)) => l.line == r.line,
+                        _ => true,
+                    }
+                });
+            if eq_typo || semi_typo {
+                self.bump();
+                // Gracefully handle small typos.
+                err.with_span_suggestion_short(
+                    self.prev_token.span,
+                    "field names and their types are separated with `:`",
+                    ":",
+                    Applicability::MachineApplicable,
+                )
+                .emit();
+            } else {
+                return Err(err);
+            }
+        }
+        Ok(())
+    }
+
+    /// Parses a structure field.
+    fn parse_name_and_ty(
+        &mut self,
+        adt_ty: &str,
+        lo: Span,
+        vis: Visibility,
+        safety: Safety,
+        attrs: AttrVec,
+    ) -> PResult<'a, FieldDef> {
+        let name = self.parse_field_ident(adt_ty, lo)?;
+        if self.token == token::Bang {
+            if let Err(mut err) = self.unexpected() {
+                // We encountered a macro invocation.
+                err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
+                return Err(err);
+            }
+        }
+        self.expect_field_ty_separator()?;
+        let ty = self.parse_ty()?;
+        if self.token == token::Colon && self.look_ahead(1, |&t| t != token::Colon) {
+            self.dcx()
+                .struct_span_err(self.token.span, "found single colon in a struct field type path")
+                .with_span_suggestion_verbose(
+                    self.token.span,
+                    "write a path separator here",
+                    "::",
+                    Applicability::MaybeIncorrect,
+                )
+                .emit();
+        }
+        let default = if self.token == token::Eq {
+            self.bump();
+            let const_expr = self.parse_expr_anon_const()?;
+            let sp = ty.span.shrink_to_hi().to(const_expr.value.span);
+            self.psess.gated_spans.gate(sym::default_field_values, sp);
+            Some(const_expr)
+        } else {
+            None
+        };
+        Ok(FieldDef {
+            span: lo.to(self.prev_token.span),
+            ident: Some(name),
+            vis,
+            safety,
+            id: DUMMY_NODE_ID,
+            ty,
+            default,
+            attrs,
+            is_placeholder: false,
+        })
+    }
+
+    /// Parses a field identifier. Specialized version of `parse_ident_common`
+    /// for better diagnostics and suggestions.
+    fn parse_field_ident(&mut self, adt_ty: &str, lo: Span) -> PResult<'a, Ident> {
+        let (ident, is_raw) = self.ident_or_err(true)?;
+        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
+            let snapshot = self.create_snapshot_for_diagnostic();
+            let err = if self.check_fn_front_matter(false, Case::Sensitive) {
+                let inherited_vis =
+                    Visibility { span: DUMMY_SP, kind: VisibilityKind::Inherited, tokens: None };
+                // We use `parse_fn` to get a span for the function
+                let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
+                match self.parse_fn(
+                    &mut AttrVec::new(),
+                    fn_parse_mode,
+                    lo,
+                    &inherited_vis,
+                    Case::Insensitive,
+                ) {
+                    Ok(_) => {
+                        self.dcx().struct_span_err(
+                            lo.to(self.prev_token.span),
+                            format!("functions are not allowed in {adt_ty} definitions"),
+                        )
+                        .with_help(
+                            "unlike in C++, Java, and C#, functions are declared in `impl` blocks",
+                        )
+                        .with_help("see https://doc.rust-lang.org/book/ch05-03-method-syntax.html for more information")
+                    }
+                    Err(err) => {
+                        err.cancel();
+                        self.restore_snapshot(snapshot);
+                        self.expected_ident_found_err()
+                    }
+                }
+            } else if self.eat_keyword(exp!(Struct)) {
+                match self.parse_item_struct() {
+                    Ok(item) => {
+                        let ItemKind::Struct(ident, ..) = item else { unreachable!() };
+                        self.dcx()
+                            .struct_span_err(
+                                lo.with_hi(ident.span.hi()),
+                                format!("structs are not allowed in {adt_ty} definitions"),
+                            )
+                            .with_help(
+                                "consider creating a new `struct` definition instead of nesting",
+                            )
+                    }
+                    Err(err) => {
+                        err.cancel();
+                        self.restore_snapshot(snapshot);
+                        self.expected_ident_found_err()
+                    }
+                }
+            } else {
+                let mut err = self.expected_ident_found_err();
+                if self.eat_keyword_noexpect(kw::Let)
+                    && let removal_span = self.prev_token.span.until(self.token.span)
+                    && let Ok(ident) = self
+                        .parse_ident_common(false)
+                        // Cancel this error, we don't need it.
+                        .map_err(|err| err.cancel())
+                    && self.token == TokenKind::Colon
+                {
+                    err.span_suggestion(
+                        removal_span,
+                        "remove this `let` keyword",
+                        String::new(),
+                        Applicability::MachineApplicable,
+                    );
+                    err.note("the `let` keyword is not allowed in `struct` fields");
+                    err.note("see <https://doc.rust-lang.org/book/ch05-01-defining-structs.html> for more information");
+                    err.emit();
+                    return Ok(ident);
+                } else {
+                    self.restore_snapshot(snapshot);
+                }
+                err
+            };
+            return Err(err);
+        }
+        self.bump();
+        Ok(ident)
+    }
+
+    /// Parses a declarative macro 2.0 definition.
+    /// The `macro` keyword has already been parsed.
+    /// ```ebnf
+    /// MacBody = "{" TOKEN_STREAM "}" ;
+    /// MacParams = "(" TOKEN_STREAM ")" ;
+    /// DeclMac = "macro" Ident MacParams? MacBody ;
+    /// ```
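+    ///
+    /// For example (illustrative only), both of the following forms fit this grammar:
+    ///
+    /// ```text
+    /// macro m { ($e:expr) => { $e + 1 } }
+    /// macro m($e:expr) { $e + 1 }
+    /// ```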
+    fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemKind> {
+        let ident = self.parse_ident()?;
+        let body = if self.check(exp!(OpenBrace)) {
+            self.parse_delim_args()? // `MacBody`
+        } else if self.check(exp!(OpenParen)) {
+            let params = self.parse_token_tree(); // `MacParams`
+            let pspan = params.span();
+            if !self.check(exp!(OpenBrace)) {
+                self.unexpected()?;
+            }
+            let body = self.parse_token_tree(); // `MacBody`
+            // Convert `MacParams MacBody` into `{ MacParams => MacBody }`.
+            let bspan = body.span();
+            let arrow = TokenTree::token_alone(token::FatArrow, pspan.between(bspan)); // `=>`
+            let tokens = TokenStream::new(vec![params, arrow, body]);
+            let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
+            P(DelimArgs { dspan, delim: Delimiter::Brace, tokens })
+        } else {
+            self.unexpected_any()?
+        };
+
+        self.psess.gated_spans.gate(sym::decl_macro, lo.to(self.prev_token.span));
+        Ok(ItemKind::MacroDef(ident, ast::MacroDef { body, macro_rules: false }))
+    }
+
+    /// Is this a possibly malformed start of a `macro_rules! foo` item definition?
+    fn is_macro_rules_item(&mut self) -> IsMacroRulesItem {
+        if self.check_keyword(exp!(MacroRules)) {
+            let macro_rules_span = self.token.span;
+
+            if self.look_ahead(1, |t| *t == token::Bang) && self.look_ahead(2, |t| t.is_ident()) {
+                return IsMacroRulesItem::Yes { has_bang: true };
+            } else if self.look_ahead(1, |t| t.is_ident()) {
+                // macro_rules foo
+                self.dcx().emit_err(errors::MacroRulesMissingBang {
+                    span: macro_rules_span,
+                    hi: macro_rules_span.shrink_to_hi(),
+                });
+
+                return IsMacroRulesItem::Yes { has_bang: false };
+            }
+        }
+
+        IsMacroRulesItem::No
+    }
+
+    /// Parses a `macro_rules! foo { ... }` declarative macro.
+    fn parse_item_macro_rules(
+        &mut self,
+        vis: &Visibility,
+        has_bang: bool,
+    ) -> PResult<'a, ItemKind> {
+        self.expect_keyword(exp!(MacroRules))?; // `macro_rules`
+
+        if has_bang {
+            self.expect(exp!(Bang))?; // `!`
+        }
+        let ident = self.parse_ident()?;
+
+        if self.eat(exp!(Bang)) {
+            // Handle macro_rules! foo!
+            let span = self.prev_token.span;
+            self.dcx().emit_err(errors::MacroNameRemoveBang { span });
+        }
+
+        let body = self.parse_delim_args()?;
+        self.eat_semi_for_macro_if_needed(&body);
+        self.complain_if_pub_macro(vis, true);
+
+        Ok(ItemKind::MacroDef(ident, ast::MacroDef { body, macro_rules: true }))
+    }
+
+    /// Item macro invocations or `macro_rules!` definitions need inherited visibility.
+    /// If that's not the case, emit an error.
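+    ///
+    /// For example (illustrative only), both of the following are rejected here:
+    ///
+    /// ```text
+    /// pub macro_rules! m { () => {} }
+    /// pub m!();
+    /// ```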
+    fn complain_if_pub_macro(&self, vis: &Visibility, macro_rules: bool) {
+        if let VisibilityKind::Inherited = vis.kind {
+            return;
+        }
+
+        let vstr = pprust::vis_to_string(vis);
+        let vstr = vstr.trim_end();
+        if macro_rules {
+            self.dcx().emit_err(errors::MacroRulesVisibility { span: vis.span, vis: vstr });
+        } else {
+            self.dcx().emit_err(errors::MacroInvocationVisibility { span: vis.span, vis: vstr });
+        }
+    }
+
+    fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs) {
+        if args.need_semicolon() && !self.eat(exp!(Semi)) {
+            self.report_invalid_macro_expansion_item(args);
+        }
+    }
+
+    fn report_invalid_macro_expansion_item(&self, args: &DelimArgs) {
+        let span = args.dspan.entire();
+        let mut err = self.dcx().struct_span_err(
+            span,
+            "macros that expand to items must be delimited with braces or followed by a semicolon",
+        );
+        // FIXME: This will make us not emit the help even for declarative
+        // macros within the same crate (that we can fix), which is sad.
+        if !span.from_expansion() {
+            let DelimSpan { open, close } = args.dspan;
+            err.multipart_suggestion(
+                "change the delimiters to curly braces",
+                vec![(open, "{".to_string()), (close, '}'.to_string())],
+                Applicability::MaybeIncorrect,
+            );
+            err.span_suggestion(
+                span.with_neighbor(self.token.span).shrink_to_hi(),
+                "add a semicolon",
+                ';',
+                Applicability::MaybeIncorrect,
+            );
+        }
+        err.emit();
+    }
+
+    /// Checks whether the current token is one of the keywords that cannot be nested, like
+    /// `kw::Enum`. If it is, we try to parse the item and report an error about the nested type.
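+    ///
+    /// For example (illustrative only), given
+    ///
+    /// ```text
+    /// enum E {
+    ///     struct S { f: u8 }
+    /// }
+    /// ```
+    ///
+    /// the nested `struct` item is parsed and an error about the nesting is reported.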
+    fn recover_nested_adt_item(&mut self, keyword: Symbol) -> PResult<'a, bool> {
+        if (self.token.is_keyword(kw::Enum)
+            || self.token.is_keyword(kw::Struct)
+            || self.token.is_keyword(kw::Union))
+            && self.look_ahead(1, |t| t.is_ident())
+        {
+            let kw_token = self.token;
+            let kw_str = pprust::token_to_string(&kw_token);
+            let item = self.parse_item(ForceCollect::No)?;
+            let mut item = item.unwrap().span;
+            if self.token == token::Comma {
+                item = item.to(self.token.span);
+            }
+            self.dcx().emit_err(errors::NestedAdt {
+                span: kw_token.span,
+                item,
+                kw_str,
+                keyword: keyword.as_str(),
+            });
+            // We successfully parsed the item, but we must inform the caller about the nesting problem.
+            return Ok(false);
+        }
+        Ok(true)
+    }
+}
+
+/// The parsing configuration used to parse a parameter list (see `parse_fn_params`).
+///
+/// The function decides whether each parameter `p` must have a pattern or may consist of just a type.
+///
+/// This function pointer takes an edition because, in edition 2015, trait declarations
+/// were allowed to omit parameter names; from edition 2018 onward they are required.
+type ReqName = fn(Edition) -> bool;
+
+/// Parsing configuration for functions.
+///
+/// The syntax of function items is slightly different within trait definitions,
+/// impl blocks, and modules. It is still parsed using the same code, just with
+/// different flags set, so that even when the input is wrong and produces a parse
+/// error, it still gets into the AST and the rest of the parser and
+/// type checker can run.
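+///
+/// For instance (illustrative only; it mirrors the recovery construction used elsewhere in
+/// this file), a mode that requires both parameter names and a body would be:
+///
+/// ```text
+/// FnParseMode { req_name: |_| true, req_body: true }
+/// ```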
+#[derive(Clone, Copy)]
+pub(crate) struct FnParseMode {
+    /// A function pointer that decides if, per-parameter `p`, `p` must have a
+    /// pattern or just a type. This field affects parsing of the parameters list.
+    ///
+    /// ```text
+    /// fn foo(alef: A) -> X { X::new() }
+    ///        -----^^ affects parsing this part of the function signature
+    ///        |
+    ///        if req_name returns false, then this name is optional
+    ///
+    /// fn bar(A) -> X;
+    ///        ^
+    ///        |
+    ///        if req_name returns true, this is an error
+    /// ```
+    ///
+    /// Calling this function pointer should only return false if:
+    ///
+    ///   * The item is being parsed inside of a trait definition.
+    ///     Within an impl block or a module, it should always evaluate
+    ///     to true.
+    ///   * The span is from Edition 2015. In particular, you can get a
+    ///     2015 span inside a 2021 crate using macros.
+    pub(super) req_name: ReqName,
+    /// If this flag is set to `true`, then plain, semicolon-terminated function
+    /// prototypes are not allowed here.
+    ///
+    /// ```text
+    /// fn foo(alef: A) -> X { X::new() }
+    ///                      ^^^^^^^^^^^^
+    ///                      |
+    ///                      this is always allowed
+    ///
+    /// fn bar(alef: A, bet: B) -> X;
+    ///                             ^
+    ///                             |
+    ///                             if req_body is set to true, this is an error
+    /// ```
+    ///
+    /// This field should only be set to false if the item is inside of a trait
+    /// definition or extern block. Within an impl block or a module, it should
+    /// always be set to true.
+    pub(super) req_body: bool,
+}
+
+/// Parsing of functions and methods.
+impl<'a> Parser<'a> {
+    /// Parse a function starting from the front matter (`const ...`) to the body `{ ... }` or `;`.
+    fn parse_fn(
+        &mut self,
+        attrs: &mut AttrVec,
+        fn_parse_mode: FnParseMode,
+        sig_lo: Span,
+        vis: &Visibility,
+        case: Case,
+    ) -> PResult<'a, (Ident, FnSig, Generics, Option<P<FnContract>>, Option<P<Block>>)> {
+        let fn_span = self.token.span;
+        let header = self.parse_fn_front_matter(vis, case, FrontMatterParsingMode::Function)?; // `const ... fn`
+        let ident = self.parse_ident()?; // `foo`
+        let mut generics = self.parse_generics()?; // `<'a, T, ...>`
+        let decl = match self.parse_fn_decl(
+            fn_parse_mode.req_name,
+            AllowPlus::Yes,
+            RecoverReturnSign::Yes,
+        ) {
+            Ok(decl) => decl,
+            Err(old_err) => {
+                // If we see `for Ty ...` then user probably meant `impl` item.
+                if self.token.is_keyword(kw::For) {
+                    old_err.cancel();
+                    return Err(self.dcx().create_err(errors::FnTypoWithImpl { fn_span }));
+                } else {
+                    return Err(old_err);
+                }
+            }
+        };
+
+        // Store the end of function parameters to give better diagnostics
+        // inside `parse_fn_body()`.
+        let fn_params_end = self.prev_token.span.shrink_to_hi();
+
+        let contract = self.parse_contract()?;
+
+        generics.where_clause = self.parse_where_clause()?; // `where T: Ord`
+
+        // `fn_params_end` is needed only when it's followed by a where clause.
+        let fn_params_end =
+            if generics.where_clause.has_where_token { Some(fn_params_end) } else { None };
+
+        let mut sig_hi = self.prev_token.span;
+        // Either `;` or `{ ... }`.
+        let body =
+            self.parse_fn_body(attrs, &ident, &mut sig_hi, fn_parse_mode.req_body, fn_params_end)?;
+        let fn_sig_span = sig_lo.to(sig_hi);
+        Ok((ident, FnSig { header, decl, span: fn_sig_span }, generics, contract, body))
+    }
+
+    /// Provides diagnostics when a function body is not found.
+    fn error_fn_body_not_found(
+        &mut self,
+        ident_span: Span,
+        req_body: bool,
+        fn_params_end: Option<Span>,
+    ) -> PResult<'a, ErrorGuaranteed> {
+        let expected: &[_] =
+            if req_body { &[exp!(OpenBrace)] } else { &[exp!(Semi), exp!(OpenBrace)] };
+        match self.expected_one_of_not_found(&[], expected) {
+            Ok(error_guaranteed) => Ok(error_guaranteed),
+            Err(mut err) => {
+                if self.token == token::CloseBrace {
+                    // The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
+                    // the AST for typechecking.
+                    err.span_label(ident_span, "while parsing this `fn`");
+                    Ok(err.emit())
+                } else if self.token == token::RArrow
+                    && let Some(fn_params_end) = fn_params_end
+                {
+                    // Instead of a function body, the parser has encountered a right arrow
+                    // preceded by a where clause.
+
+                    // Find whether token behind the right arrow is a function trait and
+                    // store its span.
+                    let fn_trait_span =
+                        [sym::FnOnce, sym::FnMut, sym::Fn].into_iter().find_map(|symbol| {
+                            if self.prev_token.is_ident_named(symbol) {
+                                Some(self.prev_token.span)
+                            } else {
+                                None
+                            }
+                        });
+
+                    // Parse the return type (along with the right arrow) and store its span.
+                    // If there's a parse error, cancel it and return the existing error
+                    // as we are primarily concerned with the
+                    // expected-function-body-but-found-something-else error here.
+                    let arrow_span = self.token.span;
+                    let ty_span = match self.parse_ret_ty(
+                        AllowPlus::Yes,
+                        RecoverQPath::Yes,
+                        RecoverReturnSign::Yes,
+                    ) {
+                        Ok(ty_span) => ty_span.span().shrink_to_hi(),
+                        Err(parse_error) => {
+                            parse_error.cancel();
+                            return Err(err);
+                        }
+                    };
+                    let ret_ty_span = arrow_span.to(ty_span);
+
+                    if let Some(fn_trait_span) = fn_trait_span {
+                        // Typo'd Fn* trait bounds such as
+                        // fn foo<F>() where F: FnOnce -> () {}
+                        err.subdiagnostic(errors::FnTraitMissingParen { span: fn_trait_span });
+                    } else if let Ok(snippet) = self.psess.source_map().span_to_snippet(ret_ty_span)
+                    {
+                        // If the token behind the right arrow is not a Fn* trait, the programmer
+                        // probably misplaced the return type after the where clause like
+                        // `fn foo<T>() where T: Default -> u8 {}`
+                        err.primary_message(
+                            "return type should be specified after the function parameters",
+                        );
+                        err.subdiagnostic(errors::MisplacedReturnType {
+                            fn_params_end,
+                            snippet,
+                            ret_ty_span,
+                        });
+                    }
+                    Err(err)
+                } else {
+                    Err(err)
+                }
+            }
+        }
+    }
+
+    /// Parse the "body" of a function.
+    /// This can either be `;` when there's no body,
+    /// or e.g. a block when the function is a provided one.
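+    ///
+    /// For example (illustrative only):
+    ///
+    /// ```text
+    /// fn required(&self);           // body is `;`
+    /// fn provided(&self) { ... }    // body is a block
+    /// ```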
+    fn parse_fn_body(
+        &mut self,
+        attrs: &mut AttrVec,
+        ident: &Ident,
+        sig_hi: &mut Span,
+        req_body: bool,
+        fn_params_end: Option<Span>,
+    ) -> PResult<'a, Option<P<Block>>> {
+        let has_semi = if req_body {
+            self.token == TokenKind::Semi
+        } else {
+            // Only include `;` in the list of expected tokens if the body is not required.
+            self.check(exp!(Semi))
+        };
+        let (inner_attrs, body) = if has_semi {
+            // Include the trailing semicolon in the span of the signature
+            self.expect_semi()?;
+            *sig_hi = self.prev_token.span;
+            (AttrVec::new(), None)
+        } else if self.check(exp!(OpenBrace)) || self.token.is_metavar_block() {
+            self.parse_block_common(self.token.span, BlockCheckMode::Default, None)
+                .map(|(attrs, body)| (attrs, Some(body)))?
+        } else if self.token == token::Eq {
+            // Recover `fn foo() = $expr;`.
+            self.bump(); // `=`
+            let eq_sp = self.prev_token.span;
+            let _ = self.parse_expr()?;
+            self.expect_semi()?; // `;`
+            let span = eq_sp.to(self.prev_token.span);
+            let guar = self.dcx().emit_err(errors::FunctionBodyEqualsExpr {
+                span,
+                sugg: errors::FunctionBodyEqualsExprSugg { eq: eq_sp, semi: self.prev_token.span },
+            });
+            (AttrVec::new(), Some(self.mk_block_err(span, guar)))
+        } else {
+            self.error_fn_body_not_found(ident.span, req_body, fn_params_end)?;
+            (AttrVec::new(), None)
+        };
+        attrs.extend(inner_attrs);
+        Ok(body)
+    }
+
+    /// Is the current token the start of an `FnHeader` / not a valid parse?
+    ///
+    /// `check_pub` adds `pub` to the set of checked qualifiers, in case users place it
+    /// incorrectly; this can be used to ensure `pub` never comes after `default`.
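+    ///
+    /// For example (illustrative only), all of the following starts are accepted:
+    ///
+    /// ```text
+    /// fn ...
+    /// const fn ...
+    /// async unsafe fn ...
+    /// extern "C" fn ...
+    /// ```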
+    pub(super) fn check_fn_front_matter(&mut self, check_pub: bool, case: Case) -> bool {
+        const ALL_QUALS: &[ExpKeywordPair] = &[
+            exp!(Pub),
+            exp!(Gen),
+            exp!(Const),
+            exp!(Async),
+            exp!(Unsafe),
+            exp!(Safe),
+            exp!(Extern),
+        ];
+
+        // We use an over-approximation here.
+        // `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
+        // `pub` is added in case users got confused with the ordering, e.g. `async pub fn`,
+        // but only if it wasn't preceded by `default`, as `default pub` is invalid.
+        let quals: &[_] = if check_pub {
+            ALL_QUALS
+        } else {
+            &[exp!(Gen), exp!(Const), exp!(Async), exp!(Unsafe), exp!(Safe), exp!(Extern)]
+        };
+        self.check_keyword_case(exp!(Fn), case) // Definitely an `fn`.
+            // `$qual fn` or `$qual $qual`:
+            || quals.iter().any(|&exp| self.check_keyword_case(exp, case))
+                && self.look_ahead(1, |t| {
+                    // `$qual fn`, e.g. `const fn` or `async fn`.
+                    t.is_keyword_case(kw::Fn, case)
+                    // Two qualifiers `$qual $qual` are enough, e.g. `async unsafe`.
+                    || (
+                        (
+                            t.is_non_raw_ident_where(|i|
+                                quals.iter().any(|exp| exp.kw == i.name)
+                                    // Rule out 2015 `const async: T = val`.
+                                    && i.is_reserved()
+                            )
+                            || case == Case::Insensitive
+                                && t.is_non_raw_ident_where(|i| quals.iter().any(|exp| {
+                                    exp.kw.as_str() == i.name.as_str().to_lowercase()
+                                }))
+                        )
+                        // Rule out `unsafe extern {`.
+                        && !self.is_unsafe_foreign_mod()
+                        // Rule out `async gen {` and `async gen move {`
+                        && !self.is_async_gen_block())
+                })
+            // `extern ABI fn`
+            || self.check_keyword_case(exp!(Extern), case)
+                // Use `tree_look_ahead` because `ABI` might be a metavariable,
+                // i.e. an invisible-delimited sequence, and `tree_look_ahead`
+                // will consider that a single element when looking ahead.
+                && self.look_ahead(1, |t| t.can_begin_string_literal())
+                && (self.tree_look_ahead(2, |tt| {
+                    match tt {
+                        TokenTree::Token(t, _) => t.is_keyword_case(kw::Fn, case),
+                        TokenTree::Delimited(..) => false,
+                    }
+                }) == Some(true) ||
+                    // This branch is only for better diagnostics; `pub`, `unsafe`, etc. are not
+                    // allowed here.
+                    (self.may_recover()
+                        && self.tree_look_ahead(2, |tt| {
+                            match tt {
+                                TokenTree::Token(t, _) =>
+                                    ALL_QUALS.iter().any(|exp| {
+                                        t.is_keyword(exp.kw)
+                                    }),
+                                TokenTree::Delimited(..) => false,
+                            }
+                        }) == Some(true)
+                        && self.tree_look_ahead(3, |tt| {
+                            match tt {
+                                TokenTree::Token(t, _) => t.is_keyword_case(kw::Fn, case),
+                                TokenTree::Delimited(..) => false,
+                            }
+                        }) == Some(true)
+                    )
+                )
+    }
+
+    /// Parses all the "front matter" (or "qualifiers") for a `fn` declaration,
+    /// up to and including the `fn` keyword. The formal grammar is:
+    ///
+    /// ```text
+    /// Extern = "extern" StringLit? ;
+    /// FnQual = "const"? "async"? "unsafe"? Extern? ;
+    /// FnFrontMatter = FnQual "fn" ;
+    /// ```
+    ///
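+    /// For example (illustrative only), in
+    ///
+    /// ```text
+    /// const unsafe extern "C" fn foo() {}
+    /// ```
+    ///
+    /// the front matter is `const unsafe extern "C" fn`.
+    ///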
+    /// `vis` represents the visibility that was already parsed, if any. Use
+    /// `Visibility::Inherited` when no visibility is known.
+    ///
+    /// If `parsing_mode` is `FrontMatterParsingMode::FunctionPtrType`, we error on `const` and `async` qualifiers,
+    /// which are not allowed in function pointer types.
+    pub(super) fn parse_fn_front_matter(
+        &mut self,
+        orig_vis: &Visibility,
+        case: Case,
+        parsing_mode: FrontMatterParsingMode,
+    ) -> PResult<'a, FnHeader> {
+        let sp_start = self.token.span;
+        let constness = self.parse_constness(case);
+        if parsing_mode == FrontMatterParsingMode::FunctionPtrType
+            && let Const::Yes(const_span) = constness
+        {
+            self.dcx().emit_err(FnPointerCannotBeConst {
+                span: const_span,
+                suggestion: const_span.until(self.token.span),
+            });
+        }
+
+        let async_start_sp = self.token.span;
+        let coroutine_kind = self.parse_coroutine_kind(case);
+        if parsing_mode == FrontMatterParsingMode::FunctionPtrType
+            && let Some(ast::CoroutineKind::Async { span: async_span, .. }) = coroutine_kind
+        {
+            self.dcx().emit_err(FnPointerCannotBeAsync {
+                span: async_span,
+                suggestion: async_span.until(self.token.span),
+            });
+        }
+        // FIXME(gen_blocks): emit a similar error for `gen fn()`
+
+        let unsafe_start_sp = self.token.span;
+        let safety = self.parse_safety(case);
+
+        let ext_start_sp = self.token.span;
+        let ext = self.parse_extern(case);
+
+        if let Some(CoroutineKind::Async { span, .. }) = coroutine_kind {
+            if span.is_rust_2015() {
+                self.dcx().emit_err(errors::AsyncFnIn2015 {
+                    span,
+                    help: errors::HelpUseLatestEdition::new(),
+                });
+            }
+        }
+
+        match coroutine_kind {
+            Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => {
+                self.psess.gated_spans.gate(sym::gen_blocks, span);
+            }
+            Some(CoroutineKind::Async { .. }) | None => {}
+        }
+
+        if !self.eat_keyword_case(exp!(Fn), case) {
+            // It is possible for `expect_one_of` to recover given the contents of
+            // `self.expected_token_types`, therefore, do not use `self.unexpected()` which doesn't
+            // account for this.
+            match self.expect_one_of(&[], &[]) {
+                Ok(Recovered::Yes(_)) => {}
+                Ok(Recovered::No) => unreachable!(),
+                Err(mut err) => {
+                    // Qualifier keywords ordering check
+                    enum WrongKw {
+                        Duplicated(Span),
+                        Misplaced(Span),
+                        /// `MisplacedDisallowedQualifier` is only used instead of `Misplaced`,
+                        /// when the misplaced keyword is disallowed by the current `FrontMatterParsingMode`.
+                        /// In this case, we avoid generating the suggestion to swap around the keywords,
+                        /// as we already generated a suggestion to remove the keyword earlier.
+                        MisplacedDisallowedQualifier,
+                    }
+
+                    // We may be able to recover
+                    let mut recover_constness = constness;
+                    let mut recover_coroutine_kind = coroutine_kind;
+                    let mut recover_safety = safety;
+                    // This will allow the machine-applicable fix to place the keyword directly in
+                    // the correct position, or to indicate that the keyword is already present
+                    // and the second instance should be removed.
+                    let wrong_kw = if self.check_keyword(exp!(Const)) {
+                        match constness {
+                            Const::Yes(sp) => Some(WrongKw::Duplicated(sp)),
+                            Const::No => {
+                                recover_constness = Const::Yes(self.token.span);
+                                match parsing_mode {
+                                    FrontMatterParsingMode::Function => {
+                                        Some(WrongKw::Misplaced(async_start_sp))
+                                    }
+                                    FrontMatterParsingMode::FunctionPtrType => {
+                                        self.dcx().emit_err(FnPointerCannotBeConst {
+                                            span: self.token.span,
+                                            suggestion: self
+                                                .token
+                                                .span
+                                                .with_lo(self.prev_token.span.hi()),
+                                        });
+                                        Some(WrongKw::MisplacedDisallowedQualifier)
+                                    }
+                                }
+                            }
+                        }
+                    } else if self.check_keyword(exp!(Async)) {
+                        match coroutine_kind {
+                            Some(CoroutineKind::Async { span, .. }) => {
+                                Some(WrongKw::Duplicated(span))
+                            }
+                            Some(CoroutineKind::AsyncGen { span, .. }) => {
+                                Some(WrongKw::Duplicated(span))
+                            }
+                            Some(CoroutineKind::Gen { .. }) => {
+                                recover_coroutine_kind = Some(CoroutineKind::AsyncGen {
+                                    span: self.token.span,
+                                    closure_id: DUMMY_NODE_ID,
+                                    return_impl_trait_id: DUMMY_NODE_ID,
+                                });
+                                // FIXME(gen_blocks): This span is wrong, didn't want to think about it.
+                                Some(WrongKw::Misplaced(unsafe_start_sp))
+                            }
+                            None => {
+                                recover_coroutine_kind = Some(CoroutineKind::Async {
+                                    span: self.token.span,
+                                    closure_id: DUMMY_NODE_ID,
+                                    return_impl_trait_id: DUMMY_NODE_ID,
+                                });
+                                match parsing_mode {
+                                    FrontMatterParsingMode::Function => {
+                                        Some(WrongKw::Misplaced(async_start_sp))
+                                    }
+                                    FrontMatterParsingMode::FunctionPtrType => {
+                                        self.dcx().emit_err(FnPointerCannotBeAsync {
+                                            span: self.token.span,
+                                            suggestion: self
+                                                .token
+                                                .span
+                                                .with_lo(self.prev_token.span.hi()),
+                                        });
+                                        Some(WrongKw::MisplacedDisallowedQualifier)
+                                    }
+                                }
+                            }
+                        }
+                    } else if self.check_keyword(exp!(Unsafe)) {
+                        match safety {
+                            Safety::Unsafe(sp) => Some(WrongKw::Duplicated(sp)),
+                            Safety::Safe(sp) => {
+                                recover_safety = Safety::Unsafe(self.token.span);
+                                Some(WrongKw::Misplaced(sp))
+                            }
+                            Safety::Default => {
+                                recover_safety = Safety::Unsafe(self.token.span);
+                                Some(WrongKw::Misplaced(ext_start_sp))
+                            }
+                        }
+                    } else if self.check_keyword(exp!(Safe)) {
+                        match safety {
+                            Safety::Safe(sp) => Some(WrongKw::Duplicated(sp)),
+                            Safety::Unsafe(sp) => {
+                                recover_safety = Safety::Safe(self.token.span);
+                                Some(WrongKw::Misplaced(sp))
+                            }
+                            Safety::Default => {
+                                recover_safety = Safety::Safe(self.token.span);
+                                Some(WrongKw::Misplaced(ext_start_sp))
+                            }
+                        }
+                    } else {
+                        None
+                    };
+
+                    // The keyword is already present, suggest removal of the second instance
+                    if let Some(WrongKw::Duplicated(original_sp)) = wrong_kw {
+                        let original_kw = self
+                            .span_to_snippet(original_sp)
+                            .expect("Span extracted directly from keyword should always work");
+
+                        err.span_suggestion(
+                            self.token_uninterpolated_span(),
+                            format!("`{original_kw}` already used earlier, remove this one"),
+                            "",
+                            Applicability::MachineApplicable,
+                        )
+                        .span_note(original_sp, format!("`{original_kw}` first seen here"));
+                    }
+                    // The keyword has not been seen yet, suggest correct placement in the function front matter
+                    else if let Some(WrongKw::Misplaced(correct_pos_sp)) = wrong_kw {
+                        let correct_pos_sp = correct_pos_sp.to(self.prev_token.span);
+                        if let Ok(current_qual) = self.span_to_snippet(correct_pos_sp) {
+                            let misplaced_qual_sp = self.token_uninterpolated_span();
+                            let misplaced_qual = self.span_to_snippet(misplaced_qual_sp).unwrap();
+
+                            err.span_suggestion(
+                                    correct_pos_sp.to(misplaced_qual_sp),
+                                    format!("`{misplaced_qual}` must come before `{current_qual}`"),
+                                    format!("{misplaced_qual} {current_qual}"),
+                                    Applicability::MachineApplicable,
+                                ).note("keyword order for functions declaration is `pub`, `default`, `const`, `async`, `unsafe`, `extern`");
+                        }
+                    }
+                    // Recover incorrect visibility order such as `async pub`
+                    else if self.check_keyword(exp!(Pub)) {
+                        let sp = sp_start.to(self.prev_token.span);
+                        if let Ok(snippet) = self.span_to_snippet(sp) {
+                            let current_vis = match self.parse_visibility(FollowedByType::No) {
+                                Ok(v) => v,
+                                Err(d) => {
+                                    d.cancel();
+                                    return Err(err);
+                                }
+                            };
+                            let vs = pprust::vis_to_string(&current_vis);
+                            let vs = vs.trim_end();
+
+                            // There was no explicit visibility
+                            if matches!(orig_vis.kind, VisibilityKind::Inherited) {
+                                err.span_suggestion(
+                                    sp_start.to(self.prev_token.span),
+                                    format!("visibility `{vs}` must come before `{snippet}`"),
+                                    format!("{vs} {snippet}"),
+                                    Applicability::MachineApplicable,
+                                );
+                            }
+                            // There was an explicit visibility
+                            else {
+                                err.span_suggestion(
+                                    current_vis.span,
+                                    "there is already a visibility modifier, remove one",
+                                    "",
+                                    Applicability::MachineApplicable,
+                                )
+                                .span_note(orig_vis.span, "explicit visibility first seen here");
+                            }
+                        }
+                    }
+
+                    // FIXME(gen_blocks): add keyword recovery logic for genness
+
+                    if let Some(wrong_kw) = wrong_kw
+                        && self.may_recover()
+                        && self.look_ahead(1, |tok| tok.is_keyword_case(kw::Fn, case))
+                    {
+                        // Advance past the misplaced keyword and `fn`
+                        self.bump();
+                        self.bump();
+                        // When we recover from a `MisplacedDisallowedQualifier`, we already emitted an error for the disallowed qualifier
+                        // So we don't emit another error that the qualifier is unexpected.
+                        if matches!(wrong_kw, WrongKw::MisplacedDisallowedQualifier) {
+                            err.cancel();
+                        } else {
+                            err.emit();
+                        }
+                        return Ok(FnHeader {
+                            constness: recover_constness,
+                            safety: recover_safety,
+                            coroutine_kind: recover_coroutine_kind,
+                            ext,
+                        });
+                    }
+
+                    return Err(err);
+                }
+            }
+        }
+
+        Ok(FnHeader { constness, safety, coroutine_kind, ext })
+    }
+
+    /// Parses the parameter list and result type of a function declaration.
+    pub(super) fn parse_fn_decl(
+        &mut self,
+        req_name: ReqName,
+        ret_allow_plus: AllowPlus,
+        recover_return_sign: RecoverReturnSign,
+    ) -> PResult<'a, P<FnDecl>> {
+        Ok(P(FnDecl {
+            inputs: self.parse_fn_params(req_name)?,
+            output: self.parse_ret_ty(ret_allow_plus, RecoverQPath::Yes, recover_return_sign)?,
+        }))
+    }
+
+    /// Parses the parameter list of a function, including the `(` and `)` delimiters.
+    pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
+        let mut first_param = true;
+        // Parse the arguments, starting out with `self` being allowed...
+        if self.token != TokenKind::OpenParen
+        // might be typo'd trait impl, handled elsewhere
+        && !self.token.is_keyword(kw::For)
+        {
+            // recover from missing argument list, e.g. `fn main -> () {}`
+            self.dcx()
+                .emit_err(errors::MissingFnParams { span: self.prev_token.span.shrink_to_hi() });
+            return Ok(ThinVec::new());
+        }
+
+        let (mut params, _) = self.parse_paren_comma_seq(|p| {
+            p.recover_vcs_conflict_marker();
+            let snapshot = p.create_snapshot_for_diagnostic();
+            let param = p.parse_param_general(req_name, first_param, true).or_else(|e| {
+                let guar = e.emit();
+                // When parsing a param fails, make sure the span of the param does not
+                // include the '(' that precedes it,
+                // e.g. when parsing `*mut Self` in the function `fn oof(*mut Self)`.
+                let lo = if let TokenKind::OpenParen = p.prev_token.kind {
+                    p.prev_token.span.shrink_to_hi()
+                } else {
+                    p.prev_token.span
+                };
+                p.restore_snapshot(snapshot);
+                // Skip every token until next possible arg or end.
+                p.eat_to_tokens(&[exp!(Comma), exp!(CloseParen)]);
+                // Create a placeholder argument for proper arg count (issue #34264).
+                Ok(dummy_arg(Ident::new(sym::dummy, lo.to(p.prev_token.span)), guar))
+            });
+            // ...now that we've parsed the first argument, `self` is no longer allowed.
+            first_param = false;
+            param
+        })?;
+        // Replace duplicated recovered params with `_` pattern to avoid unnecessary errors.
+        self.deduplicate_recovered_params_names(&mut params);
+        Ok(params)
+    }
+
+    /// Parses a single function parameter.
+    ///
+    /// - `self` is syntactically allowed when `first_param` holds.
+    /// - `recover_arg_parse` is used to recover from a failed argument parse.
+    pub(super) fn parse_param_general(
+        &mut self,
+        req_name: ReqName,
+        first_param: bool,
+        recover_arg_parse: bool,
+    ) -> PResult<'a, Param> {
+        let lo = self.token.span;
+        let attrs = self.parse_outer_attributes()?;
+        self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+            // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
+            if let Some(mut param) = this.parse_self_param()? {
+                param.attrs = attrs;
+                let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
+                return Ok((res?, Trailing::No, UsePreAttrPos::No));
+            }
+
+            let is_name_required = match this.token.kind {
+                token::DotDotDot => false,
+                _ => req_name(this.token.span.with_neighbor(this.prev_token.span).edition()),
+            };
+            let (pat, ty) = if is_name_required || this.is_named_param() {
+                debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
+                let (pat, colon) = this.parse_fn_param_pat_colon()?;
+                if !colon {
+                    let mut err = this.unexpected().unwrap_err();
+                    return if let Some(ident) =
+                        this.parameter_without_type(&mut err, pat, is_name_required, first_param)
+                    {
+                        let guar = err.emit();
+                        Ok((dummy_arg(ident, guar), Trailing::No, UsePreAttrPos::No))
+                    } else {
+                        Err(err)
+                    };
+                }
+
+                this.eat_incorrect_doc_comment_for_param_type();
+                (pat, this.parse_ty_for_param()?)
+            } else {
+                debug!("parse_param_general ident_to_pat");
+                let parser_snapshot_before_ty = this.create_snapshot_for_diagnostic();
+                this.eat_incorrect_doc_comment_for_param_type();
+                let mut ty = this.parse_ty_for_param();
+
+                if let Ok(t) = &ty {
+                    // Check for trailing angle brackets
+                    if let TyKind::Path(_, Path { segments, .. }) = &t.kind
+                        && let Some(segment) = segments.last()
+                        && let Some(guar) =
+                            this.check_trailing_angle_brackets(segment, &[exp!(CloseParen)])
+                    {
+                        return Ok((
+                            dummy_arg(segment.ident, guar),
+                            Trailing::No,
+                            UsePreAttrPos::No,
+                        ));
+                    }
+
+                    if this.token != token::Comma && this.token != token::CloseParen {
+                        // This wasn't actually a type, but a pattern looking like a type,
+                        // so we are going to roll back and re-parse for recovery.
+                        ty = this.unexpected_any();
+                    }
+                }
+                match ty {
+                    Ok(ty) => {
+                        let pat = this.mk_pat(ty.span, PatKind::Missing);
+                        (pat, ty)
+                    }
+                    // If this is a C-variadic argument and we hit an error, return the error.
+                    Err(err) if this.token == token::DotDotDot => return Err(err),
+                    Err(err) if this.unmatched_angle_bracket_count > 0 => return Err(err),
+                    Err(err) if recover_arg_parse => {
+                        // Recover from attempting to parse the argument as a type without pattern.
+                        err.cancel();
+                        this.restore_snapshot(parser_snapshot_before_ty);
+                        this.recover_arg_parse()?
+                    }
+                    Err(err) => return Err(err),
+                }
+            };
+
+            let span = lo.to(this.prev_token.span);
+
+            Ok((
+                Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty },
+                Trailing::No,
+                UsePreAttrPos::No,
+            ))
+        })
+    }
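+
+    // Illustrative sketch (not part of the upstream sources): for a signature like
+    // `fn f(&self, x: u8, y)`, the first call to `parse_param_general` parses `&self`
+    // (accepted because `first_param` is true), the second parses `x: u8` as a
+    // `pat: ty` pair, and the third takes the missing-type path above and, with
+    // recovery enabled, is replaced by a dummy argument so later arity checks still
+    // see three parameters.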
+
+    /// Returns the parsed optional self parameter and whether a self shortcut was used.
+    fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
+        // Extract an identifier *after* having confirmed that the token is one.
+        let expect_self_ident = |this: &mut Self| match this.token.ident() {
+            Some((ident, IdentIsRaw::No)) => {
+                this.bump();
+                ident
+            }
+            _ => unreachable!(),
+        };
+        // Is a lifetime `n` tokens ahead?
+        let is_lifetime = |this: &Self, n| this.look_ahead(n, |t| t.is_lifetime());
+        // Is `self` `n` tokens ahead?
+        let is_isolated_self = |this: &Self, n| {
+            this.is_keyword_ahead(n, &[kw::SelfLower])
+                && this.look_ahead(n + 1, |t| t != &token::PathSep)
+        };
+        // Is `pin const self` `n` tokens ahead?
+        let is_isolated_pin_const_self = |this: &Self, n| {
+            this.look_ahead(n, |token| token.is_ident_named(sym::pin))
+                && this.is_keyword_ahead(n + 1, &[kw::Const])
+                && is_isolated_self(this, n + 2)
+        };
+        // Is `mut self` `n` tokens ahead?
+        let is_isolated_mut_self =
+            |this: &Self, n| this.is_keyword_ahead(n, &[kw::Mut]) && is_isolated_self(this, n + 1);
+        // Is `pin mut self` `n` tokens ahead?
+        let is_isolated_pin_mut_self = |this: &Self, n| {
+            this.look_ahead(n, |token| token.is_ident_named(sym::pin))
+                && is_isolated_mut_self(this, n + 1)
+        };
+        // Parse `self` or `self: TYPE`. We already know the current token is `self`.
+        let parse_self_possibly_typed = |this: &mut Self, m| {
+            let eself_ident = expect_self_ident(this);
+            let eself_hi = this.prev_token.span;
+            let eself = if this.eat(exp!(Colon)) {
+                SelfKind::Explicit(this.parse_ty()?, m)
+            } else {
+                SelfKind::Value(m)
+            };
+            Ok((eself, eself_ident, eself_hi))
+        };
+        let expect_self_ident_not_typed =
+            |this: &mut Self, modifier: &SelfKind, modifier_span: Span| {
+                let eself_ident = expect_self_ident(this);
+
+                // Recover `: Type` after a qualified self
+                if this.may_recover() && this.eat_noexpect(&token::Colon) {
+                    let snap = this.create_snapshot_for_diagnostic();
+                    match this.parse_ty() {
+                        Ok(ty) => {
+                            this.dcx().emit_err(errors::IncorrectTypeOnSelf {
+                                span: ty.span,
+                                move_self_modifier: errors::MoveSelfModifier {
+                                    removal_span: modifier_span,
+                                    insertion_span: ty.span.shrink_to_lo(),
+                                    modifier: modifier.to_ref_suggestion(),
+                                },
+                            });
+                        }
+                        Err(diag) => {
+                            diag.cancel();
+                            this.restore_snapshot(snap);
+                        }
+                    }
+                }
+                eself_ident
+            };
+        // Recover for the grammar `*self`, `*const self`, and `*mut self`.
+        let recover_self_ptr = |this: &mut Self| {
+            this.dcx().emit_err(errors::SelfArgumentPointer { span: this.token.span });
+
+            Ok((SelfKind::Value(Mutability::Not), expect_self_ident(this), this.prev_token.span))
+        };
+
+        // Parse optional `self` parameter of a method.
+        // Only a limited set of initial token sequences is considered `self` parameters; anything
+        // else is parsed as a normal function parameter list, so some lookahead is required.
+        let eself_lo = self.token.span;
+        let (eself, eself_ident, eself_hi) = match self.token.uninterpolate().kind {
+            token::And => {
+                let has_lifetime = is_lifetime(self, 1);
+                let skip_lifetime_count = has_lifetime as usize;
+                let eself = if is_isolated_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    SelfKind::Region(lifetime, Mutability::Not)
+                } else if is_isolated_mut_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} mut self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    self.bump(); // mut
+                    SelfKind::Region(lifetime, Mutability::Mut)
+                } else if is_isolated_pin_const_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} pin const self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    self.psess.gated_spans.gate(sym::pin_ergonomics, self.token.span);
+                    self.bump(); // pin
+                    self.bump(); // const
+                    SelfKind::Pinned(lifetime, Mutability::Not)
+                } else if is_isolated_pin_mut_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} pin mut self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    self.psess.gated_spans.gate(sym::pin_ergonomics, self.token.span);
+                    self.bump(); // pin
+                    self.bump(); // mut
+                    SelfKind::Pinned(lifetime, Mutability::Mut)
+                } else {
+                    // `&not_self`
+                    return Ok(None);
+                };
+                let hi = self.token.span;
+                let self_ident = expect_self_ident_not_typed(self, &eself, eself_lo.until(hi));
+                (eself, self_ident, hi)
+            }
+            // `*self`
+            token::Star if is_isolated_self(self, 1) => {
+                self.bump();
+                recover_self_ptr(self)?
+            }
+            // `*mut self` and `*const self`
+            token::Star
+                if self.look_ahead(1, |t| t.is_mutability()) && is_isolated_self(self, 2) =>
+            {
+                self.bump();
+                self.bump();
+                recover_self_ptr(self)?
+            }
+            // `self` and `self: TYPE`
+            token::Ident(..) if is_isolated_self(self, 0) => {
+                parse_self_possibly_typed(self, Mutability::Not)?
+            }
+            // `mut self` and `mut self: TYPE`
+            token::Ident(..) if is_isolated_mut_self(self, 0) => {
+                self.bump();
+                parse_self_possibly_typed(self, Mutability::Mut)?
+            }
+            _ => return Ok(None),
+        };
+
+        let eself = source_map::respan(eself_lo.to(eself_hi), eself);
+        Ok(Some(Param::from_self(AttrVec::default(), eself, eself_ident)))
+    }
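+
+    // For illustration (not in the original sources): the `self` forms recognized above
+    // are roughly
+    //
+    //     self              mut self          self: Ty          mut self: Ty
+    //     &self             &'a self          &mut self         &'a mut self
+    //     &pin const self   &pin mut self     (gated behind `pin_ergonomics`)
+    //
+    // `*self`, `*const self` and `*mut self` are accepted only as error recovery, and
+    // anything else (e.g. `&not_self`) makes this return `Ok(None)`.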
+
+    fn is_named_param(&self) -> bool {
+        let offset = match &self.token.kind {
+            token::OpenInvisible(origin) => match origin {
+                InvisibleOrigin::MetaVar(MetaVarKind::Pat(_)) => {
+                    return self.check_noexpect_past_close_delim(&token::Colon);
+                }
+                _ => 0,
+            },
+            token::And | token::AndAnd => 1,
+            _ if self.token.is_keyword(kw::Mut) => 1,
+            _ => 0,
+        };
+
+        self.look_ahead(offset, |t| t.is_ident())
+            && self.look_ahead(offset + 1, |t| t == &token::Colon)
+    }
+
+    fn recover_self_param(&mut self) -> bool {
+        matches!(
+            self.parse_outer_attributes()
+                .and_then(|_| self.parse_self_param())
+                .map_err(|e| e.cancel()),
+            Ok(Some(_))
+        )
+    }
+}
+
+enum IsMacroRulesItem {
+    Yes { has_bang: bool },
+    No,
+}
+
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub(super) enum FrontMatterParsingMode {
+    /// Parse the front matter of a function declaration
+    Function,
+    /// Parse the front matter of a function pointer type.
+    /// For function pointer types, the `const` and `async` keywords are not permitted.
+    FunctionPtrType,
+}
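+
+// Illustrative example (not part of the upstream sources): in `Function` mode the front
+// matter of `const async unsafe extern "C" fn f()` is accepted, while in
+// `FunctionPtrType` mode (e.g. the type `unsafe extern "C" fn()`) the `const` and
+// `async` qualifiers are rejected, as described above.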
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
new file mode 100644
index 00000000000..90491e53249
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -0,0 +1,1665 @@
+pub mod attr;
+mod attr_wrapper;
+mod diagnostics;
+mod expr;
+mod generics;
+mod item;
+mod nonterminal;
+mod pat;
+mod path;
+mod stmt;
+pub mod token_type;
+mod ty;
+
+// Parsers for non-functionlike builtin macros are defined in rustc_parse so they can be used by
+// both rustc_builtin_macros and rustfmt.
+pub mod asm;
+pub mod cfg_select;
+
+use std::assert_matches::debug_assert_matches;
+use std::{fmt, mem, slice};
+
+use attr_wrapper::{AttrWrapper, UsePreAttrPos};
+pub use diagnostics::AttemptLocalParseRecovery;
+pub(crate) use expr::ForbiddenLetReason;
+pub(crate) use item::FnParseMode;
+pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
+use path::PathStyle;
+use rustc_ast::ptr::P;
+use rustc_ast::token::{
+    self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
+};
+use rustc_ast::tokenstream::{
+    ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor,
+};
+use rustc_ast::util::case::Case;
+use rustc_ast::{
+    self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
+    DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
+    Visibility, VisibilityKind,
+};
+use rustc_ast_pretty::pprust;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
+use rustc_index::interval::IntervalSet;
+use rustc_session::parse::ParseSess;
+use rustc_span::{Ident, Span, Symbol, kw, sym};
+use thin_vec::ThinVec;
+use token_type::TokenTypeSet;
+pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
+use tracing::debug;
+
+use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
+use crate::exp;
+
+#[cfg(test)]
+mod tests;
+
+// Ideally, these tests would be in `rustc_ast`. But they depend on having a
+// parser, so they are here.
+#[cfg(test)]
+mod tokenstream {
+    mod tests;
+}
+
+bitflags::bitflags! {
+    /// Restrictions applied while parsing.
+    ///
+    /// The parser maintains a bitset of restrictions it will honor while
+    /// parsing. This is essentially used as a way of tracking state of what
+    /// is being parsed and to change behavior based on that.
+    #[derive(Clone, Copy, Debug)]
+    struct Restrictions: u8 {
+        /// Restricts expressions for use in statement position.
+        ///
+        /// When expressions are used in various places, like statements or
+        /// match arms, this is used to stop parsing once certain tokens are
+        /// reached.
+        ///
+        /// For example, `if true {} & 1` with `STMT_EXPR` in effect is parsed
+        /// as two separate expression statements (`if` and a reference to 1).
+        /// Otherwise it is parsed as a bitwise AND where `if` is on the left
+        /// and 1 is on the right.
+        const STMT_EXPR         = 1 << 0;
+        /// Do not allow struct literals.
+        ///
+        /// There are several places in the grammar where we don't want to
+        /// allow struct literals because they can require lookahead, or
+        /// otherwise could be ambiguous or cause confusion. For example, in
+        /// `if Foo {} {}` it is unclear whether `Foo {}` is a struct literal,
+        /// or whether `Foo` alone is the condition, followed by a consequent
+        /// block and then an empty block.
+        ///
+        /// See [RFC 92](https://rust-lang.github.io/rfcs/0092-struct-grammar.html).
+        const NO_STRUCT_LITERAL = 1 << 1;
+        /// Used to provide better error messages for const generic arguments.
+        ///
+        /// An un-braced const generic argument is limited to a very small
+        /// subset of expressions. This is used to detect the situation where
+        /// an expression outside of that subset is used, and to suggest to
+        /// wrap the expression in braces.
+        const CONST_EXPR        = 1 << 2;
+        /// Allows `let` expressions.
+        ///
+        /// `let pattern = scrutinee` is parsed as an expression, but it is
+        /// only allowed in let chains (`if` and `while` conditions).
+        /// Otherwise it is not an expression (note that `let` in statement
+        /// positions is treated as a `StmtKind::Let` statement, which has a
+        /// slightly different grammar).
+        const ALLOW_LET         = 1 << 3;
+        /// Used to detect a missing `=>` in a match guard.
+        ///
+        /// This is used for error handling in a match guard to give a better
+        /// error message if the `=>` is missing. It is set when parsing the
+        /// guard expression.
+        const IN_IF_GUARD       = 1 << 4;
+        /// Used to detect the incorrect use of expressions in patterns.
+        ///
+        /// This is used for error handling while parsing a pattern. During
+        /// error recovery, this will be set to try to parse the pattern as an
+        /// expression, but halts parsing the expression when reaching certain
+        /// tokens like `=`.
+        const IS_PAT            = 1 << 5;
+    }
+}
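+
+// Usage sketch (illustrative, not from the upstream sources): these flags are combined
+// as a bitset, e.g. the condition of an `if` is parsed with something like
+// `Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET`, so that `if Foo {} {}`
+// stays unambiguous while `if let`-chains remain expressible.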
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+enum SemiColonMode {
+    Break,
+    Ignore,
+    Comma,
+}
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+enum BlockMode {
+    Break,
+    Ignore,
+}
+
+/// Whether or not we should force collection of tokens for an AST node,
+/// regardless of whether or not it has attributes
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum ForceCollect {
+    Yes,
+    No,
+}
+
+/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
+#[macro_export]
+macro_rules! maybe_recover_from_interpolated_ty_qpath {
+    ($self: expr, $allow_qpath_recovery: expr) => {
+        if $allow_qpath_recovery
+            && $self.may_recover()
+            && let Some(mv_kind) = $self.token.is_metavar_seq()
+            && let token::MetaVarKind::Ty { .. } = mv_kind
+            && $self.check_noexpect_past_close_delim(&token::PathSep)
+        {
+            // Reparse the type, then move to recovery.
+            let ty = $self
+                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
+                .expect("metavar seq ty");
+
+            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
+        }
+    };
+}
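+
+// For illustration (not in the original sources): if a macro binds `$t:ty` to `Vec<u8>`
+// and its expansion contains `$t::new()`, the pasted type cannot start a path, so the
+// macro above reparses the metavariable and recovers the call as the qualified path
+// `<Vec<u8>>::new()`.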
+
+#[derive(Clone, Copy, Debug)]
+pub enum Recovery {
+    Allowed,
+    Forbidden,
+}
+
+#[derive(Clone)]
+pub struct Parser<'a> {
+    pub psess: &'a ParseSess,
+    /// The current token.
+    pub token: Token,
+    /// The spacing for the current token.
+    token_spacing: Spacing,
+    /// The previous token.
+    pub prev_token: Token,
+    pub capture_cfg: bool,
+    restrictions: Restrictions,
+    expected_token_types: TokenTypeSet,
+    token_cursor: TokenCursor,
+    // The number of calls to `bump`, i.e. the position in the token stream.
+    num_bump_calls: u32,
+    // During parsing we may sometimes need to "unglue" a glued token into two
+    // or three component tokens (e.g. `>>` into `>` and `>`, or `>>=` into `>`
+    // and `>` and `=`), so the parser can consume them one at a time. This
+    // process bypasses the normal capturing mechanism (e.g. `num_bump_calls`
+    // will not be incremented), since the "unglued" tokens do not exist in
+    // the original `TokenStream`.
+    //
+    // If we end up consuming all the component tokens, this is not an issue,
+    // because we'll end up capturing the single "glued" token.
+    //
+    // However, sometimes we may want to capture only part of the original
+    // token. For example, capturing the `Vec<u8>` in `Option<Vec<u8>>`
+    // requires us to unglue the trailing `>>` token. The `break_last_token`
+    // field is used to track these tokens. They get appended to the captured
+    // stream when we evaluate a `LazyAttrTokenStream`.
+    //
+    // This value is always 0, 1, or 2. It can only reach 2 when splitting
+    // `>>=` or `<<=`.
+    break_last_token: u32,
+    /// This field is used to keep track of how many left angle brackets we have seen. This is
+    /// required in order to detect extra leading left angle brackets (`<` characters) and error
+    /// appropriately.
+    ///
+    /// See the comments in the `parse_path_segment` function for more details.
+    unmatched_angle_bracket_count: u16,
+    angle_bracket_nesting: u16,
+
+    last_unexpected_token_span: Option<Span>,
+    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
+    subparser_name: Option<&'static str>,
+    capture_state: CaptureState,
+    /// This allows us to recover when the user forgets to add braces around
+    /// multiple statements in the closure body.
+    current_closure: Option<ClosureSpans>,
+    /// Whether the parser is allowed to do recovery.
+    /// This is disabled when parsing macro arguments, see #103534
+    recovery: Recovery,
+}
+
+// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
+// nonterminals. Make sure it doesn't unintentionally get bigger. We only check a few arches
+// though, because `TokenTypeSet(u128)` alignment varies on others, changing the total size.
+#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
+rustc_data_structures::static_assert_size!(Parser<'_>, 288);
+
+/// Stores span information about a closure.
+#[derive(Clone, Debug)]
+struct ClosureSpans {
+    whole_closure: Span,
+    closing_pipe: Span,
+    body: Span,
+}
+
+/// Controls how we capture tokens. Capturing can be expensive,
+/// so we try to avoid performing capturing in cases where
+/// we will never need an `AttrTokenStream`.
+#[derive(Copy, Clone, Debug)]
+enum Capturing {
+    /// We aren't performing any capturing - this is the default mode.
+    No,
+    /// We are capturing tokens.
+    Yes,
+}
+
+// This state is used by `Parser::collect_tokens`.
+#[derive(Clone, Debug)]
+struct CaptureState {
+    capturing: Capturing,
+    parser_replacements: Vec<ParserReplacement>,
+    inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
+    // `IntervalSet` is good for perf because attrs are mostly added to this
+    // set in contiguous ranges.
+    seen_attrs: IntervalSet<AttrId>,
+}
+
+/// A sequence separator.
+#[derive(Debug)]
+struct SeqSep<'a> {
+    /// The separator token.
+    sep: Option<ExpTokenPair<'a>>,
+    /// `true` if a trailing separator is allowed.
+    trailing_sep_allowed: bool,
+}
+
+impl<'a> SeqSep<'a> {
+    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
+        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
+    }
+
+    fn none() -> SeqSep<'a> {
+        SeqSep { sep: None, trailing_sep_allowed: false }
+    }
+}
+
+#[derive(Debug)]
+pub enum FollowedByType {
+    Yes,
+    No,
+}
+
+#[derive(Copy, Clone, Debug)]
+enum Trailing {
+    No,
+    Yes,
+}
+
+impl From<bool> for Trailing {
+    fn from(b: bool) -> Trailing {
+        if b { Trailing::Yes } else { Trailing::No }
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub(super) enum TokenDescription {
+    ReservedIdentifier,
+    Keyword,
+    ReservedKeyword,
+    DocComment,
+
+    // Expanded metavariables are wrapped in invisible delimiters which aren't
+    // pretty-printed. In error messages we must handle these specially
+    // otherwise we get confusing things in messages like "expected `(`, found
+    // ``". It's better to say e.g. "expected `(`, found type metavariable".
+    MetaVar(MetaVarKind),
+}
+
+impl TokenDescription {
+    pub(super) fn from_token(token: &Token) -> Option<Self> {
+        match token.kind {
+            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
+            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
+            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
+            token::DocComment(..) => Some(TokenDescription::DocComment),
+            token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
+                Some(TokenDescription::MetaVar(kind))
+            }
+            _ => None,
+        }
+    }
+}
+
+pub fn token_descr(token: &Token) -> String {
+    let s = pprust::token_to_string(token).to_string();
+
+    match (TokenDescription::from_token(token), &token.kind) {
+        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
+        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
+        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
+        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
+        // Deliberately doesn't print `s`, which is empty.
+        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
+        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
+        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
+        (None, _) => format!("`{s}`"),
+    }
+}
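+
+// Illustrative examples (not part of the upstream sources): `token_descr` produces
+// strings such as "keyword `fn`" for the keyword `fn`, "doc comment `/// hi`" for a
+// doc comment, and "`expr` metavariable" for a token coming from an expanded `$e:expr`
+// fragment, whose textual form would otherwise be empty.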
+
+impl<'a> Parser<'a> {
+    pub fn new(
+        psess: &'a ParseSess,
+        stream: TokenStream,
+        subparser_name: Option<&'static str>,
+    ) -> Self {
+        let mut parser = Parser {
+            psess,
+            token: Token::dummy(),
+            token_spacing: Spacing::Alone,
+            prev_token: Token::dummy(),
+            capture_cfg: false,
+            restrictions: Restrictions::empty(),
+            expected_token_types: TokenTypeSet::new(),
+            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
+            num_bump_calls: 0,
+            break_last_token: 0,
+            unmatched_angle_bracket_count: 0,
+            angle_bracket_nesting: 0,
+            last_unexpected_token_span: None,
+            subparser_name,
+            capture_state: CaptureState {
+                capturing: Capturing::No,
+                parser_replacements: Vec::new(),
+                inner_attr_parser_ranges: Default::default(),
+                seen_attrs: IntervalSet::new(u32::MAX as usize),
+            },
+            current_closure: None,
+            recovery: Recovery::Allowed,
+        };
+
+        // Make parser point to the first token.
+        parser.bump();
+
+        // Change this from 1 back to 0 after the bump. This eases debugging of
+        // `Parser::collect_tokens` because 0-indexed token positions are nicer
+        // than 1-indexed token positions.
+        parser.num_bump_calls = 0;
+
+        parser
+    }
+
+    #[inline]
+    pub fn recovery(mut self, recovery: Recovery) -> Self {
+        self.recovery = recovery;
+        self
+    }
+
+    #[inline]
+    fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
+        let old = mem::replace(&mut self.recovery, recovery);
+        let res = f(self);
+        self.recovery = old;
+        res
+    }
+
+    /// Whether the parser is allowed to recover from broken code.
+    ///
+    /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
+    /// is not allowed. All recovery done by the parser must be gated behind this check.
+    ///
+    /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
+    /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
+    #[inline]
+    fn may_recover(&self) -> bool {
+        matches!(self.recovery, Recovery::Allowed)
+    }
+
+    /// Version of [`unexpected`](Parser::unexpected) that "returns" any type in the `Ok`
+    /// (neither function ever actually returns `Ok`, so the `Ok` type can be chosen freely).
+    pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
+        match self.expect_one_of(&[], &[]) {
+            Err(e) => Err(e),
+            // We can get `Ok(true)` from `recover_closing_delimiter`
+            // which is called in `expected_one_of_not_found`.
+            Ok(_) => FatalError.raise(),
+        }
+    }
+
+    pub fn unexpected(&mut self) -> PResult<'a, ()> {
+        self.unexpected_any()
+    }
+
+    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
+    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
+        if self.expected_token_types.is_empty() {
+            if self.token == *exp.tok {
+                self.bump();
+                Ok(Recovered::No)
+            } else {
+                self.unexpected_try_recover(exp.tok)
+            }
+        } else {
+            self.expect_one_of(slice::from_ref(&exp), &[])
+        }
+    }
+
+    /// Expect next token to be edible or inedible token. If edible,
+    /// then consume it; if inedible, then return without consuming
+    /// anything. Signal a fatal error if next token is unexpected.
+    fn expect_one_of(
+        &mut self,
+        edible: &[ExpTokenPair<'_>],
+        inedible: &[ExpTokenPair<'_>],
+    ) -> PResult<'a, Recovered> {
+        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
+            self.bump();
+            Ok(Recovered::No)
+        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
+            // leave it in the input
+            Ok(Recovered::No)
+        } else if self.token != token::Eof
+            && self.last_unexpected_token_span == Some(self.token.span)
+        {
+            FatalError.raise();
+        } else {
+            self.expected_one_of_not_found(edible, inedible)
+                .map(|error_guaranteed| Recovered::Yes(error_guaranteed))
+        }
+    }
+
+    // Public for rustfmt usage.
+    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
+        self.parse_ident_common(true)
+    }
+
+    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
+        let (ident, is_raw) = self.ident_or_err(recover)?;
+
+        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
+            let err = self.expected_ident_found_err();
+            if recover {
+                err.emit();
+            } else {
+                return Err(err);
+            }
+        }
+        self.bump();
+        Ok(ident)
+    }
+
+    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
+        match self.token.ident() {
+            Some(ident) => Ok(ident),
+            None => self.expected_ident_found(recover),
+        }
+    }
+
+    /// Checks if the next token is `tok`, and returns `true` if so.
+    ///
+    /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
+    /// encountered.
+    #[inline]
+    fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
+        let is_present = self.token == *exp.tok;
+        if !is_present {
+            self.expected_token_types.insert(exp.token_type);
+        }
+        is_present
+    }
+
+    #[inline]
+    #[must_use]
+    fn check_noexpect(&self, tok: &TokenKind) -> bool {
+        self.token == *tok
+    }
+
+    // Check the first token after the delimiter that closes the current
+    // delimited sequence. (Panics if used in the outermost token stream, which
+    // has no delimiters.) It uses a clone of the relevant tree cursor to skip
+    // past the entire `TokenTree::Delimited` in a single step, avoiding the
+    // need for unbounded token lookahead.
+    //
+    // Primarily used when `self.token` matches `OpenInvisible(_)`, to look
+    // ahead through the current metavar expansion.
+    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
+        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
+        tree_cursor.bump();
+        matches!(
+            tree_cursor.curr(),
+            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
+        )
+    }
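+
+    // Illustrative use (not from the upstream sources): if `self.token` is the invisible
+    // open delimiter of an expanded `$p:pat` fragment, then
+    // `check_noexpect_past_close_delim(&token::Colon)` peeks at the token that follows
+    // the matching invisible close delimiter, e.g. to decide whether the pasted pattern
+    // is the name in a `name: Type` function parameter.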
+
+    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
+    ///
+    /// The main purpose of this function is to reduce the clutter that using the normal
+    /// `eat` method could introduce into the suggestions list in some cases.
+    #[inline]
+    #[must_use]
+    fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
+        let is_present = self.check_noexpect(tok);
+        if is_present {
+            self.bump()
+        }
+        is_present
+    }
+
+    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
+    #[inline]
+    #[must_use]
+    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
+        let is_present = self.check(exp);
+        if is_present {
+            self.bump()
+        }
+        is_present
+    }
+
+    /// If the next token is the given keyword, returns `true` without eating it.
+    /// An expectation is also added for diagnostics purposes.
+    #[inline]
+    #[must_use]
+    fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
+        let is_keyword = self.token.is_keyword(exp.kw);
+        if !is_keyword {
+            self.expected_token_types.insert(exp.token_type);
+        }
+        is_keyword
+    }
+
+    #[inline]
+    #[must_use]
+    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
+        if self.check_keyword(exp) {
+            true
+        } else if case == Case::Insensitive
+            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
+            // Do an ASCII case-insensitive match, because all keywords are ASCII.
+            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
+        {
+            true
+        } else {
+            false
+        }
+    }
+
+    /// If the next token is the given keyword, eats it and returns `true`.
+    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
+    // Public for rustc_builtin_macros and rustfmt usage.
+    #[inline]
+    #[must_use]
+    pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
+        let is_keyword = self.check_keyword(exp);
+        if is_keyword {
+            self.bump();
+        }
+        is_keyword
+    }
+
+    /// Eats a keyword, optionally ignoring the case.
+    /// If the case differs (and is ignored) an error is issued.
+    /// This is useful for recovery.
+    #[inline]
+    #[must_use]
+    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
+        if self.eat_keyword(exp) {
+            true
+        } else if case == Case::Insensitive
+            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
+            // Do an ASCII case-insensitive match, because all keywords are ASCII.
+            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
+        {
+            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
+            self.bump();
+            true
+        } else {
+            false
+        }
+    }
+
+    /// If the next token is the given keyword, eats it and returns `true`.
+    /// Otherwise, returns `false`. No expectation is added.
+    // Public for rustc_builtin_macros usage.
+    #[inline]
+    #[must_use]
+    pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
+        let is_keyword = self.token.is_keyword(kw);
+        if is_keyword {
+            self.bump();
+        }
+        is_keyword
+    }
+
+    /// If the given word is not a keyword, signals an error.
+    /// If the next token is not the given word, signals an error.
+    /// Otherwise, eats it.
+    pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
+        if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
+    }
+
+    /// Consume a sequence produced by a metavar expansion, if present.
+    fn eat_metavar_seq<T>(
+        &mut self,
+        mv_kind: MetaVarKind,
+        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> Option<T> {
+        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
+    }
+
+    /// A slightly more general form of `eat_metavar_seq`, for use with the
+    /// `MetaVarKind` variants that have parameters, where an exact match isn't
+    /// desired.
+    fn eat_metavar_seq_with_matcher<T>(
+        &mut self,
+        match_mv_kind: impl Fn(MetaVarKind) -> bool,
+        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> Option<T> {
+        if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
+            && match_mv_kind(mv_kind)
+        {
+            self.bump();
+
+            // Recovery is disabled when parsing macro arguments, so it must
+            // also be disabled when reparsing pasted macro arguments,
+            // otherwise we get inconsistent results (e.g. #137874).
+            let res = self.with_recovery(Recovery::Forbidden, |this| f(this));
+
+            let res = match res {
+                Ok(res) => res,
+                Err(err) => {
+                    // This can occur in unusual error cases, e.g. #139445.
+                    err.delay_as_bug();
+                    return None;
+                }
+            };
+
+            if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
+                && match_mv_kind(mv_kind)
+            {
+                self.bump();
+                Some(res)
+            } else {
+                // This can occur when invalid syntax is passed to a decl macro. E.g. see #139248,
+                // where the reparse attempt of an invalid expr consumed the trailing invisible
+                // delimiter.
+                self.dcx().span_delayed_bug(
+                    self.token.span,
+                    format!("no close delim with reparsing {mv_kind:?}"),
+                );
+                None
+            }
+        } else {
+            None
+        }
+    }
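+
+    // Usage sketch (illustrative, not from the upstream sources): the qpath recovery
+    // macro earlier in this module calls
+    // `eat_metavar_seq(mv_kind, |p| p.parse_ty_no_question_mark_recover())`, i.e. it
+    // steps inside the invisible delimiters of a pasted `$ty` fragment, reparses the
+    // type with recovery forbidden, and then consumes the closing delimiter.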
+
+    /// Is the given keyword `kw` followed by a non-reserved identifier?
+    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
+        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_non_reserved_ident())
+    }
+
+    #[inline]
+    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
+        if !ok {
+            self.expected_token_types.insert(token_type);
+        }
+        ok
+    }
+
+    fn check_ident(&mut self) -> bool {
+        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
+    }
+
+    fn check_path(&mut self) -> bool {
+        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
+    }
+
+    fn check_type(&mut self) -> bool {
+        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
+    }
+
+    fn check_const_arg(&mut self) -> bool {
+        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
+    }
+
+    fn check_const_closure(&self) -> bool {
+        self.is_keyword_ahead(0, &[kw::Const])
+            && self.look_ahead(1, |t| match &t.kind {
+                // async closures do not work with const closures, so we do not parse that here.
+                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
+                | token::OrOr
+                | token::Or => true,
+                _ => false,
+            })
+    }
+
+    fn check_inline_const(&self, dist: usize) -> bool {
+        self.is_keyword_ahead(dist, &[kw::Const])
+            && self.look_ahead(dist + 1, |t| match &t.kind {
+                token::OpenBrace => true,
+                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
+                _ => false,
+            })
+    }
+
+    /// Checks whether the next token is either `+` or `+=`,
+    /// returning `true` if so and `false` otherwise.
+    #[inline]
+    fn check_plus(&mut self) -> bool {
+        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
+    }
+
+    /// Eats the expected token if it's present, possibly breaking
+    /// compound tokens like multi-character operators in the process.
+    /// Returns `true` if the token was eaten.
+    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
+        if self.token == *exp.tok {
+            self.bump();
+            return true;
+        }
+        match self.token.kind.break_two_token_op(1) {
+            Some((first, second)) if first == *exp.tok => {
+                let first_span = self.psess.source_map().start_point(self.token.span);
+                let second_span = self.token.span.with_lo(first_span.hi());
+                self.token = Token::new(first, first_span);
+                // Keep track of this token - if we end token capturing now,
+                // we'll want to append this token to the captured stream.
+                //
+                // If we consume any additional tokens, then this token
+                // is not needed (we'll capture the entire 'glued' token),
+                // and `bump` will set this field to 0.
+                self.break_last_token += 1;
+                // Use the spacing of the glued token as the spacing of the
+                // unglued second token.
+                self.bump_with((Token::new(second, second_span), self.token_spacing));
+                true
+            }
+            _ => {
+                self.expected_token_types.insert(exp.token_type);
+                false
+            }
+        }
+    }
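+
+    // For illustration (not in the original sources): when the parser expects a single
+    // `>` but the current token is the glued `>>`, `break_and_eat(exp!(Gt))` consumes
+    // one `>` and leaves the remaining `>` as the current token, recording the split in
+    // `break_last_token` so that token capture stays consistent.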
+
+    /// Eats `+`, possibly breaking tokens like `+=` in the process.
+    fn eat_plus(&mut self) -> bool {
+        self.break_and_eat(exp!(Plus))
+    }
+
+    /// Eats `&`, possibly breaking tokens like `&&` in the process.
+    /// Signals an error if `&` is not eaten.
+    fn expect_and(&mut self) -> PResult<'a, ()> {
+        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
+    }
+
+    /// Eats `|`, possibly breaking tokens like `||` in the process.
+    /// Signals an error if `|` was not eaten.
+    fn expect_or(&mut self) -> PResult<'a, ()> {
+        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
+    }
+
+    /// Eats `<`, possibly breaking tokens like `<<` in the process.
+    fn eat_lt(&mut self) -> bool {
+        let ate = self.break_and_eat(exp!(Lt));
+        if ate {
+            // See doc comment for `unmatched_angle_bracket_count`.
+            self.unmatched_angle_bracket_count += 1;
+            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
+        }
+        ate
+    }
+
+    /// Eats `<`, possibly breaking tokens like `<<` in the process.
+    /// Signals an error if `<` was not eaten.
+    fn expect_lt(&mut self) -> PResult<'a, ()> {
+        if self.eat_lt() { Ok(()) } else { self.unexpected() }
+    }
+
+    /// Eats `>`, possibly breaking tokens like `>>` in the process.
+    /// Signals an error if `>` was not eaten.
+    fn expect_gt(&mut self) -> PResult<'a, ()> {
+        if self.break_and_eat(exp!(Gt)) {
+            // See doc comment for `unmatched_angle_bracket_count`.
+            if self.unmatched_angle_bracket_count > 0 {
+                self.unmatched_angle_bracket_count -= 1;
+                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
+            }
+            Ok(())
+        } else {
+            self.unexpected()
+        }
+    }
+
+    /// Checks if the next token is contained within `closes_expected` or
+    /// `closes_not_expected`, and returns `true` if so.
+    fn expect_any_with_type(
+        &mut self,
+        closes_expected: &[ExpTokenPair<'_>],
+        closes_not_expected: &[&TokenKind],
+    ) -> bool {
+        closes_expected.iter().any(|&close| self.check(close))
+            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
+    }
+
+    /// Parses a sequence until the specified delimiters. The function
+    /// `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
+    fn parse_seq_to_before_tokens<T>(
+        &mut self,
+        closes_expected: &[ExpTokenPair<'_>],
+        closes_not_expected: &[&TokenKind],
+        sep: SeqSep<'_>,
+        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
+        let mut first = true;
+        let mut recovered = Recovered::No;
+        let mut trailing = Trailing::No;
+        let mut v = ThinVec::new();
+
+        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
+            if self.token.kind.is_close_delim_or_eof() {
+                break;
+            }
+            if let Some(exp) = sep.sep {
+                if first {
+                    // no separator for the first element
+                    first = false;
+                } else {
+                    // check for separator
+                    match self.expect(exp) {
+                        Ok(Recovered::No) => {
+                            self.current_closure.take();
+                        }
+                        Ok(Recovered::Yes(guar)) => {
+                            self.current_closure.take();
+                            recovered = Recovered::Yes(guar);
+                            break;
+                        }
+                        Err(mut expect_err) => {
+                            let sp = self.prev_token.span.shrink_to_hi();
+                            let token_str = pprust::token_kind_to_string(exp.tok);
+
+                            match self.current_closure.take() {
+                                Some(closure_spans) if self.token == TokenKind::Semi => {
+                                    // Finding a semicolon instead of a comma
+                                    // after a closure body indicates that the
+                                    // closure body may be a block but the user
+                                    // forgot to put braces around its
+                                    // statements.
+
+                                    self.recover_missing_braces_around_closure_body(
+                                        closure_spans,
+                                        expect_err,
+                                    )?;
+
+                                    continue;
+                                }
+
+                                _ => {
+                                    // Attempt to keep parsing if it was a similar separator.
+                                    if exp.tok.similar_tokens().contains(&self.token.kind) {
+                                        self.bump();
+                                    }
+                                }
+                            }
+
+                            // If this was a missing `@` in a binding pattern
+                            // bail with a suggestion
+                            // https://github.com/rust-lang/rust/issues/72373
+                            if self.prev_token.is_ident() && self.token == token::DotDot {
+                                let msg = format!(
+                                    "if you meant to bind the contents of the rest of the array \
+                                     pattern into `{}`, use `@`",
+                                    pprust::token_to_string(&self.prev_token)
+                                );
+                                expect_err
+                                    .with_span_suggestion_verbose(
+                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
+                                        msg,
+                                        " @ ",
+                                        Applicability::MaybeIncorrect,
+                                    )
+                                    .emit();
+                                break;
+                            }
+
+                            // Attempt to keep parsing if it was an omitted separator.
+                            self.last_unexpected_token_span = None;
+                            match f(self) {
+                                Ok(t) => {
+                                    // Parsed successfully, therefore most probably the code only
+                                    // misses a separator.
+                                    expect_err
+                                        .with_span_suggestion_short(
+                                            sp,
+                                            format!("missing `{token_str}`"),
+                                            token_str,
+                                            Applicability::MaybeIncorrect,
+                                        )
+                                        .emit();
+
+                                    v.push(t);
+                                    continue;
+                                }
+                                Err(e) => {
+                                    // Parsing failed, therefore it must be something more serious
+                                    // than just a missing separator.
+                                    for xx in &e.children {
+                                        // Propagate the help message from sub error `e` to main
+                                        // error `expect_err`.
+                                        expect_err.children.push(xx.clone());
+                                    }
+                                    e.cancel();
+                                    if self.token == token::Colon {
+                                        // We will try to recover in
+                                        // `maybe_recover_struct_lit_bad_delims`.
+                                        return Err(expect_err);
+                                    } else if let [exp] = closes_expected
+                                        && exp.token_type == TokenType::CloseParen
+                                    {
+                                        return Err(expect_err);
+                                    } else {
+                                        expect_err.emit();
+                                        break;
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+            if sep.trailing_sep_allowed
+                && self.expect_any_with_type(closes_expected, closes_not_expected)
+            {
+                trailing = Trailing::Yes;
+                break;
+            }
+
+            let t = f(self)?;
+            v.push(t);
+        }
+
+        Ok((v, trailing, recovered))
+    }
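+
+    // Illustrative example (not from the upstream sources): parsing the elements of
+    // `(a, b,)` with a comma separator and trailing separators allowed yields the two
+    // parsed items plus `Trailing::Yes`, and leaves the parser positioned on the
+    // closing `)`; this helper never consumes the closing delimiter itself.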
+
+    fn recover_missing_braces_around_closure_body(
+        &mut self,
+        closure_spans: ClosureSpans,
+        mut expect_err: Diag<'_>,
+    ) -> PResult<'a, ()> {
+        let initial_semicolon = self.token.span;
+
+        while self.eat(exp!(Semi)) {
+            let _ = self
+                .parse_stmt_without_recovery(false, ForceCollect::No, false)
+                .unwrap_or_else(|e| {
+                    e.cancel();
+                    None
+                });
+        }
+
+        expect_err
+            .primary_message("closure bodies that contain statements must be surrounded by braces");
+
+        let preceding_pipe_span = closure_spans.closing_pipe;
+        let following_token_span = self.token.span;
+
+        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
+        first_note.push_span_label(
+            initial_semicolon,
+            "this `;` turns the preceding closure into a statement",
+        );
+        first_note.push_span_label(
+            closure_spans.body,
+            "this expression is a statement because of the trailing semicolon",
+        );
+        expect_err.span_note(first_note, "statement found outside of a block");
+
+        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
+        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
+        second_note.push_span_label(
+            following_token_span,
+            "...but likely you meant the closure to end here",
+        );
+        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");
+
+        expect_err.span(vec![preceding_pipe_span, following_token_span]);
+
+        let opening_suggestion_str = " {".to_string();
+        let closing_suggestion_str = "}".to_string();
+
+        expect_err.multipart_suggestion(
+            "try adding braces",
+            vec![
+                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
+                (following_token_span.shrink_to_lo(), closing_suggestion_str),
+            ],
+            Applicability::MaybeIncorrect,
+        );
+
+        expect_err.emit();
+
+        Ok(())
+    }
+
+    /// Parses a sequence, not including the delimiters. The function
+    /// `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
+    fn parse_seq_to_before_end<T>(
+        &mut self,
+        close: ExpTokenPair<'_>,
+        sep: SeqSep<'_>,
+        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
+        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
+    }
+
+    /// Parses a sequence, including only the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
+    fn parse_seq_to_end<T>(
+        &mut self,
+        close: ExpTokenPair<'_>,
+        sep: SeqSep<'_>,
+        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
+        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
+        if matches!(recovered, Recovered::No) && !self.eat(close) {
+            self.dcx().span_delayed_bug(
+                self.token.span,
+                "recovered but `parse_seq_to_before_end` did not give us the close token",
+            );
+        }
+        Ok((val, trailing))
+    }
+
+    /// Parses a sequence, including both delimiters. The function
+    /// `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
+    fn parse_unspanned_seq<T>(
+        &mut self,
+        open: ExpTokenPair<'_>,
+        close: ExpTokenPair<'_>,
+        sep: SeqSep<'_>,
+        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
+        self.expect(open)?;
+        self.parse_seq_to_end(close, sep, f)
+    }
+
+    /// Parses a comma-separated sequence, including both delimiters.
+    /// The function `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
+    fn parse_delim_comma_seq<T>(
+        &mut self,
+        open: ExpTokenPair<'_>,
+        close: ExpTokenPair<'_>,
+        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
+        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
+    }
+
+    /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
+    /// The function `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
+    fn parse_paren_comma_seq<T>(
+        &mut self,
+        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
+        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
+    }
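+
+    // Usage sketch (illustrative, not from the upstream sources): a call such as
+    // `self.parse_paren_comma_seq(|p| p.parse_expr())` parses `(x, y + 1, z)` including
+    // both parentheses and reports whether a trailing comma was present.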
+
+    /// Advance the parser by one token, using the provided token as the next one.
+    fn bump_with(&mut self, next: (Token, Spacing)) {
+        self.inlined_bump_with(next)
+    }
+
+    /// This always-inlined version should only be used on hot code paths.
+    #[inline(always)]
+    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
+        // Update the current and previous tokens.
+        self.prev_token = mem::replace(&mut self.token, next_token);
+        self.token_spacing = next_spacing;
+
+        // Diagnostics.
+        self.expected_token_types.clear();
+    }
+
+    /// Advance the parser by one token.
+    pub fn bump(&mut self) {
+        // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
+        // than `.0`/`.1` access.
+        let mut next = self.token_cursor.inlined_next();
+        self.num_bump_calls += 1;
+        // We got a token from the underlying cursor and no longer need to
+        // worry about an unglued token. See `break_and_eat` for more details.
+        self.break_last_token = 0;
+        if next.0.span.is_dummy() {
+            // Tweak the location for better diagnostics, but keep syntactic context intact.
+            let fallback_span = self.token.span;
+            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
+        }
+        debug_assert!(!matches!(
+            next.0.kind,
+            token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
+        ));
+        self.inlined_bump_with(next)
+    }
+
+    /// Looks ahead `dist` tokens past `self.token` and gives `looker` access to that token.
+    /// When `dist == 0`, the current token itself is looked at. `Eof` will be
+    /// returned if the look-ahead goes any distance past the end of the tokens.
+    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
+        if dist == 0 {
+            return looker(&self.token);
+        }
+
+        // Typically around 98% of the `dist > 0` cases have `dist == 1`, so we
+        // have a fast special case for that.
+        if dist == 1 {
+            // The index is zero because the tree cursor's index always points
+            // to the next token to be gotten.
+            match self.token_cursor.curr.curr() {
+                Some(tree) => {
+                    // Indexing stayed within the current token tree.
+                    match tree {
+                        TokenTree::Token(token, _) => return looker(token),
+                        &TokenTree::Delimited(dspan, _, delim, _) => {
+                            if !delim.skip() {
+                                return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
+                            }
+                        }
+                    }
+                }
+                None => {
+                    // The tree cursor lookahead went (one) past the end of the
+                    // current token tree. Try to return a close delimiter.
+                    if let Some(last) = self.token_cursor.stack.last()
+                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
+                        && !delim.skip()
+                    {
+                        // We are not in the outermost token stream, so we have
+                        // delimiters. Also, those delimiters are not skipped.
+                        return looker(&Token::new(delim.as_close_token_kind(), span.close));
+                    }
+                }
+            }
+        }
+
+        // Just clone the token cursor and use `next`, skipping delimiters as
+        // necessary. Slow but simple.
+        let mut cursor = self.token_cursor.clone();
+        let mut i = 0;
+        let mut token = Token::dummy();
+        while i < dist {
+            token = cursor.next().0;
+            if matches!(
+                token.kind,
+                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
+            ) {
+                continue;
+            }
+            i += 1;
+        }
+        looker(&token)
+    }
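+
+    // A minimal usage sketch (illustrative): peek ahead without consuming anything,
+    // mirroring calls elsewhere in this file such as
+    // `self.look_ahead(1, |t| t.kind == token::Colon)` and
+    // `self.look_ahead(2, |t| t == &token::CloseParen)`.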
+
+    /// Like `look_ahead`, but skips over whole token trees rather than individual
+    /// tokens. Useful when looking past possible metavariable pasting sites.
+    pub fn tree_look_ahead<R>(
+        &self,
+        dist: usize,
+        looker: impl FnOnce(&TokenTree) -> R,
+    ) -> Option<R> {
+        assert_ne!(dist, 0);
+        self.token_cursor.curr.look_ahead(dist - 1).map(looker)
+    }
+
+    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
+    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
+        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
+    }
+
+    /// Parses the coroutine marker: `async`, `gen`, `async gen`, or nothing.
+    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
+        let span = self.token_uninterpolated_span();
+        if self.eat_keyword_case(exp!(Async), case) {
+            // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
+            // error if edition <= 2024, like we do with async and edition <= 2018?
+            if self.token_uninterpolated_span().at_least_rust_2024()
+                && self.eat_keyword_case(exp!(Gen), case)
+            {
+                let gen_span = self.prev_token_uninterpolated_span();
+                Some(CoroutineKind::AsyncGen {
+                    span: span.to(gen_span),
+                    closure_id: DUMMY_NODE_ID,
+                    return_impl_trait_id: DUMMY_NODE_ID,
+                })
+            } else {
+                Some(CoroutineKind::Async {
+                    span,
+                    closure_id: DUMMY_NODE_ID,
+                    return_impl_trait_id: DUMMY_NODE_ID,
+                })
+            }
+        } else if self.token_uninterpolated_span().at_least_rust_2024()
+            && self.eat_keyword_case(exp!(Gen), case)
+        {
+            Some(CoroutineKind::Gen {
+                span,
+                closure_id: DUMMY_NODE_ID,
+                return_impl_trait_id: DUMMY_NODE_ID,
+            })
+        } else {
+            None
+        }
+    }
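+
+    // Illustrative surface forms this parses (the `gen` variants require edition 2024
+    // or later and are gated behind the `gen_blocks` feature referenced in the FIXME
+    // above):
+    //
+    //     async { .. }        // CoroutineKind::Async
+    //     gen { .. }          // CoroutineKind::Gen
+    //     async gen { .. }    // CoroutineKind::AsyncGen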
+
+    /// Parses fn unsafety: `unsafe`, `safe` or nothing.
+    fn parse_safety(&mut self, case: Case) -> Safety {
+        if self.eat_keyword_case(exp!(Unsafe), case) {
+            Safety::Unsafe(self.prev_token_uninterpolated_span())
+        } else if self.eat_keyword_case(exp!(Safe), case) {
+            Safety::Safe(self.prev_token_uninterpolated_span())
+        } else {
+            Safety::Default
+        }
+    }
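+
+    // Illustrative surface forms (the `safe` qualifier is only expected on items
+    // inside `extern` blocks):
+    //
+    //     unsafe fn f() {}                      // Safety::Unsafe
+    //     unsafe extern "C" { safe fn g(); }    // Safety::Safe
+    //     fn h() {}                             // Safety::Default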
+
+    /// Parses constness: `const` or nothing.
+    fn parse_constness(&mut self, case: Case) -> Const {
+        self.parse_constness_(case, false)
+    }
+
+    /// Parses constness for closures (case sensitive, feature-gated).
+    fn parse_closure_constness(&mut self) -> Const {
+        let constness = self.parse_constness_(Case::Sensitive, true);
+        if let Const::Yes(span) = constness {
+            self.psess.gated_spans.gate(sym::const_closures, span);
+        }
+        constness
+    }
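+
+    // Illustrative: a closure written as `const || { .. }` takes this path, and the
+    // span of its `const` is gated behind the unstable `const_closures` feature above.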
+
+    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
+        // Avoid parsing const blocks and const closures as const items.
+        if (self.check_const_closure() == is_closure)
+            && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
+            && self.eat_keyword_case(exp!(Const), case)
+        {
+            Const::Yes(self.prev_token_uninterpolated_span())
+        } else {
+            Const::No
+        }
+    }
+
+    /// Parses inline const expressions.
+    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
+        self.expect_keyword(exp!(Const))?;
+        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
+        let anon_const = AnonConst {
+            id: DUMMY_NODE_ID,
+            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
+        };
+        let blk_span = anon_const.value.span;
+        let kind = if pat {
+            let guar = self
+                .dcx()
+                .struct_span_err(blk_span, "const blocks cannot be used as patterns")
+                .with_help(
+                    "use a named `const`-item or an `if`-guard (`x if x == const { ... }`) instead",
+                )
+                .emit();
+            ExprKind::Err(guar)
+        } else {
+            ExprKind::ConstBlock(anon_const)
+        };
+        Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs))
+    }
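+
+    // Illustrative: an inline const expression such as `const { 1 + 1 }` becomes
+    // `ExprKind::ConstBlock` here, while the same syntax in pattern position
+    // (`pat == true`) is rejected with the "const blocks cannot be used as patterns"
+    // error above.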
+
+    /// Parses mutability (`mut` or nothing).
+    fn parse_mutability(&mut self) -> Mutability {
+        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
+    }
+
+    /// Parses reference binding mode (`ref`, `ref mut`, or nothing).
+    fn parse_byref(&mut self) -> ByRef {
+        if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
+    }
+
+    /// Possibly parses mutability (`const` or `mut`).
+    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
+        if self.eat_keyword(exp!(Mut)) {
+            Some(Mutability::Mut)
+        } else if self.eat_keyword(exp!(Const)) {
+            Some(Mutability::Not)
+        } else {
+            None
+        }
+    }
+
+    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
+        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
+        {
+            if let Some(suffix) = suffix {
+                self.expect_no_tuple_index_suffix(self.token.span, suffix);
+            }
+            self.bump();
+            Ok(Ident::new(symbol, self.prev_token.span))
+        } else {
+            self.parse_ident_common(true)
+        }
+    }
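+
+    // Illustrative inputs: both `Foo { x: 1 }` and the positional form `Foo { 0: 1 }`
+    // are accepted here; a suffixed index such as `0usize` is reported via
+    // `expect_no_tuple_index_suffix` above.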
+
+    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
+        if let Some(args) = self.parse_delim_args_inner() {
+            Ok(P(args))
+        } else {
+            self.unexpected_any()
+        }
+    }
+
+    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
+        Ok(if let Some(args) = self.parse_delim_args_inner() {
+            AttrArgs::Delimited(args)
+        } else if self.eat(exp!(Eq)) {
+            let eq_span = self.prev_token.span;
+            AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
+        } else {
+            AttrArgs::Empty
+        })
+    }
+
+    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
+        let delimited = self.check(exp!(OpenParen))
+            || self.check(exp!(OpenBracket))
+            || self.check(exp!(OpenBrace));
+
+        delimited.then(|| {
+            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
+                unreachable!()
+            };
+            DelimArgs { dspan, delim, tokens }
+        })
+    }
+
+    /// Parses a single token tree from the input.
+    pub fn parse_token_tree(&mut self) -> TokenTree {
+        if self.token.kind.open_delim().is_some() {
+            // Clone the `TokenTree::Delimited` that we are currently
+            // within. That's what we are going to return.
+            let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
+            debug_assert_matches!(tree, TokenTree::Delimited(..));
+
+            // Advance the token cursor through the entire delimited
+            // sequence. After getting the `OpenDelim` we are *within* the
+            // delimited sequence, i.e. at depth `d`. After getting the
+            // matching `CloseDelim` we are *after* the delimited sequence,
+            // i.e. at depth `d - 1`.
+            let target_depth = self.token_cursor.stack.len() - 1;
+            loop {
+                // Advance one token at a time, so `TokenCursor::next()`
+                // can capture these tokens if necessary.
+                self.bump();
+                if self.token_cursor.stack.len() == target_depth {
+                    debug_assert!(self.token.kind.close_delim().is_some());
+                    break;
+                }
+            }
+
+            // Consume close delimiter
+            self.bump();
+            tree
+        } else {
+            assert!(!self.token.kind.is_close_delim_or_eof());
+            let prev_spacing = self.token_spacing;
+            self.bump();
+            TokenTree::Token(self.prev_token, prev_spacing)
+        }
+    }
+
+    pub fn parse_tokens(&mut self) -> TokenStream {
+        let mut result = Vec::new();
+        loop {
+            if self.token.kind.is_close_delim_or_eof() {
+                break;
+            } else {
+                result.push(self.parse_token_tree());
+            }
+        }
+        TokenStream::new(result)
+    }
+
+    /// Evaluates the closure with restrictions in place.
+    ///
+    /// After the closure is evaluated, the restrictions are reset.
+    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
+        let old = self.restrictions;
+        self.restrictions = res;
+        let res = f(self);
+        self.restrictions = old;
+        res
+    }
+
+    /// Parses `pub` and `pub(in path)` plus shortcuts `pub(crate)` for `pub(in crate)`, `pub(self)`
+    /// for `pub(in self)` and `pub(super)` for `pub(in super)`.
+    /// If the following element can't be a tuple (i.e., it's a function definition), then
+    /// it's not a tuple struct field, and if the contents within the parentheses aren't
+    /// valid, emit a proper diagnostic.
+    // Public for rustfmt usage.
+    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
+        if let Some(vis) = self
+            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
+        {
+            return Ok(vis);
+        }
+
+        if !self.eat_keyword(exp!(Pub)) {
+            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
+            // keyword to grab a span from for inherited visibility; an empty span at the
+            // beginning of the current token would seem to be the "Schelling span".
+            return Ok(Visibility {
+                span: self.token.span.shrink_to_lo(),
+                kind: VisibilityKind::Inherited,
+                tokens: None,
+            });
+        }
+        let lo = self.prev_token.span;
+
+        if self.check(exp!(OpenParen)) {
+            // We don't `self.bump()` the `(` yet because this might be a struct definition where
+            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
+            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
+            // by the following tokens.
+            if self.is_keyword_ahead(1, &[kw::In]) {
+                // Parse `pub(in path)`.
+                self.bump(); // `(`
+                self.bump(); // `in`
+                let path = self.parse_path(PathStyle::Mod)?; // `path`
+                self.expect(exp!(CloseParen))?; // `)`
+                let vis = VisibilityKind::Restricted {
+                    path: P(path),
+                    id: ast::DUMMY_NODE_ID,
+                    shorthand: false,
+                };
+                return Ok(Visibility {
+                    span: lo.to(self.prev_token.span),
+                    kind: vis,
+                    tokens: None,
+                });
+            } else if self.look_ahead(2, |t| t == &token::CloseParen)
+                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
+            {
+                // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
+                self.bump(); // `(`
+                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
+                self.expect(exp!(CloseParen))?; // `)`
+                let vis = VisibilityKind::Restricted {
+                    path: P(path),
+                    id: ast::DUMMY_NODE_ID,
+                    shorthand: true,
+                };
+                return Ok(Visibility {
+                    span: lo.to(self.prev_token.span),
+                    kind: vis,
+                    tokens: None,
+                });
+            } else if let FollowedByType::No = fbt {
+                // Provide this diagnostic if a type cannot follow;
+                // in particular, if this is not a tuple struct.
+                self.recover_incorrect_vis_restriction()?;
+                // Emit diagnostic, but continue with public visibility.
+            }
+        }
+
+        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
+    }
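+
+    // The accepted visibility forms, for reference (illustrative examples):
+    //
+    //     pub fn f() {}                        // VisibilityKind::Public
+    //     pub(crate) fn f() {}                 // Restricted, shorthand: true
+    //     pub(self) fn f() {}                  // Restricted, shorthand: true
+    //     pub(super) fn f() {}                 // Restricted, shorthand: true
+    //     pub(in crate::some::path) fn f() {}  // Restricted, shorthand: false
+    //     fn f() {}                            // VisibilityKind::Inherited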
+
+    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
+    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
+        self.bump(); // `(`
+        let path = self.parse_path(PathStyle::Mod)?;
+        self.expect(exp!(CloseParen))?; // `)`
+
+        let path_str = pprust::path_to_string(&path);
+        self.dcx()
+            .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
+
+        Ok(())
+    }
+
+    /// Parses `extern string_literal?`.
+    fn parse_extern(&mut self, case: Case) -> Extern {
+        if self.eat_keyword_case(exp!(Extern), case) {
+            let mut extern_span = self.prev_token.span;
+            let abi = self.parse_abi();
+            if let Some(abi) = abi {
+                extern_span = extern_span.to(abi.span);
+            }
+            Extern::from_abi(abi, extern_span)
+        } else {
+            Extern::None
+        }
+    }
+
+    /// Parses a string literal as an ABI spec.
+    fn parse_abi(&mut self) -> Option<StrLit> {
+        match self.parse_str_lit() {
+            Ok(str_lit) => Some(str_lit),
+            Err(Some(lit)) => match lit.kind {
+                ast::LitKind::Err(_) => None,
+                _ => {
+                    self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
+                    None
+                }
+            },
+            Err(None) => None,
+        }
+    }
+
+    fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
+        &mut self,
+        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
+    ) -> PResult<'a, R> {
+        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
+        // `ForceCollect::Yes`
+        self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
+            Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
+        })
+    }
+
+    /// Checks for `::` (or, potentially, a mistyped `:::`) and then looks ahead past it.
+    fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
+        if self.check(exp!(PathSep)) {
+            if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
+                debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
+                self.look_ahead(2, looker)
+            } else {
+                self.look_ahead(1, looker)
+            }
+        } else {
+            false
+        }
+    }
+
+    /// `::{` or `::*`
+    fn is_import_coupler(&mut self) -> bool {
+        self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star))
+    }
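+
+    // Illustrative: this returns true for the `::{` in `use foo::{a, b};` and the
+    // `::*` in `use foo::*;`.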
+
+    // Debug view of the parser's token stream, up to `{lookahead}` tokens.
+    // Only used when debugging.
+    #[allow(unused)]
+    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
+        fmt::from_fn(move |f| {
+            let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of it
+
+            // we don't need N spans, but we want at least one, so print all of prev_token
+            dbg_fmt.field("prev_token", &self.prev_token);
+            let mut tokens = vec![];
+            for i in 0..lookahead {
+                let tok = self.look_ahead(i, |tok| tok.kind);
+                let is_eof = tok == TokenKind::Eof;
+                tokens.push(tok);
+                if is_eof {
+                    // Don't look ahead past EOF.
+                    break;
+                }
+            }
+            dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
+            dbg_fmt.field("approx_token_stream_pos", &self.num_bump_calls);
+
+            // some fields are interesting for certain values, as they relate to macro parsing
+            if let Some(subparser) = self.subparser_name {
+                dbg_fmt.field("subparser_name", &subparser);
+            }
+            if let Recovery::Forbidden = self.recovery {
+                dbg_fmt.field("recovery", &self.recovery);
+            }
+
+            // imply there's "more to know" than this view
+            dbg_fmt.finish_non_exhaustive()
+        })
+    }
+
+    pub fn clear_expected_token_types(&mut self) {
+        self.expected_token_types.clear();
+    }
+
+    pub fn approx_token_stream_pos(&self) -> u32 {
+        self.num_bump_calls
+    }
+
+    /// For interpolated `self.token`, returns a span of the fragment to which
+    /// the interpolated token refers. For all other tokens this is just a
+    /// regular span. It is particularly important to use this for identifiers
+    /// and lifetimes for which spans affect name resolution and edition
+    /// checks. Note that keywords are also identifiers, so they should use
+    /// this if they keep spans or perform edition checks.
+    pub fn token_uninterpolated_span(&self) -> Span {
+        match &self.token.kind {
+            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
+            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span),
+            _ => self.token.span,
+        }
+    }
+
+    /// Like `token_uninterpolated_span`, but works on `self.prev_token`.
+    pub fn prev_token_uninterpolated_span(&self) -> Span {
+        match &self.prev_token.kind {
+            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
+            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span),
+            _ => self.prev_token.span,
+        }
+    }
+}
+
+// Metavar captures of various kinds.
+#[derive(Clone, Debug)]
+pub enum ParseNtResult {
+    Tt(TokenTree),
+    Ident(Ident, IdentIsRaw),
+    Lifetime(Ident, IdentIsRaw),
+    Item(P<ast::Item>),
+    Block(P<ast::Block>),
+    Stmt(P<ast::Stmt>),
+    Pat(P<ast::Pat>, NtPatKind),
+    Expr(P<ast::Expr>, NtExprKind),
+    Literal(P<ast::Expr>),
+    Ty(P<ast::Ty>),
+    Meta(P<ast::AttrItem>),
+    Path(P<ast::Path>),
+    Vis(P<ast::Visibility>),
+}
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
new file mode 100644
index 00000000000..7c83e96c160
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -0,0 +1,204 @@
+use rustc_ast::ptr::P;
+use rustc_ast::token::NtExprKind::*;
+use rustc_ast::token::NtPatKind::*;
+use rustc_ast::token::{self, InvisibleOrigin, MetaVarKind, NonterminalKind, Token};
+use rustc_errors::PResult;
+use rustc_span::{Ident, kw};
+
+use crate::errors::UnexpectedNonterminal;
+use crate::parser::pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
+use crate::parser::{FollowedByType, ForceCollect, ParseNtResult, Parser, PathStyle};
+
+impl<'a> Parser<'a> {
+    /// Checks whether a non-terminal may begin with a particular token.
+    ///
+    /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with
+    /// that token. Be conservative (return true) if not sure. Inlined because it has a single call
+    /// site.
+    #[inline]
+    pub fn nonterminal_may_begin_with(kind: NonterminalKind, token: &Token) -> bool {
+        /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
+        fn may_be_ident(kind: MetaVarKind) -> bool {
+            match kind {
+                MetaVarKind::Stmt
+                | MetaVarKind::Pat(_)
+                | MetaVarKind::Expr { .. }
+                | MetaVarKind::Ty { .. }
+                | MetaVarKind::Literal // `true`, `false`
+                | MetaVarKind::Meta { .. }
+                | MetaVarKind::Path => true,
+
+                MetaVarKind::Item
+                | MetaVarKind::Block
+                | MetaVarKind::Vis => false,
+
+                MetaVarKind::Ident
+                | MetaVarKind::Lifetime
+                | MetaVarKind::TT => unreachable!(),
+            }
+        }
+
+        match kind {
+            // `expr_2021` and earlier
+            NonterminalKind::Expr(Expr2021 { .. }) => {
+                token.can_begin_expr()
+                // This exception is here for backwards compatibility.
+                && !token.is_keyword(kw::Let)
+                // This exception is here for backwards compatibility.
+                && !token.is_keyword(kw::Const)
+            }
+            // Current edition expressions
+            NonterminalKind::Expr(Expr) => {
+                // In Edition 2024, `_` is considered an expression, so we
+                // need to allow it here because `token.can_begin_expr()` does
+                // not consider `_` to be an expression.
+                //
+                // Because `can_begin_expr` is used elsewhere, we need to reduce
+                // the scope of where the `_` is considered an expression to
+                // just macro parsing code.
+                (token.can_begin_expr() || token.is_keyword(kw::Underscore))
+                // This exception is here for backwards compatibility.
+                && !token.is_keyword(kw::Let)
+            }
+            NonterminalKind::Ty => token.can_begin_type(),
+            NonterminalKind::Ident => get_macro_ident(token).is_some(),
+            NonterminalKind::Literal => token.can_begin_literal_maybe_minus(),
+            NonterminalKind::Vis => match token.kind {
+                // The follow-set of :vis + "priv" keyword + interpolated/metavar-expansion.
+                token::Comma
+                | token::Ident(..)
+                | token::NtIdent(..)
+                | token::NtLifetime(..)
+                | token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => true,
+                _ => token.can_begin_type(),
+            },
+            NonterminalKind::Block => match &token.kind {
+                token::OpenBrace => true,
+                token::NtLifetime(..) => true,
+                token::OpenInvisible(InvisibleOrigin::MetaVar(k)) => match k {
+                    MetaVarKind::Block
+                    | MetaVarKind::Stmt
+                    | MetaVarKind::Expr { .. }
+                    | MetaVarKind::Literal => true,
+                    MetaVarKind::Item
+                    | MetaVarKind::Pat(_)
+                    | MetaVarKind::Ty { .. }
+                    | MetaVarKind::Meta { .. }
+                    | MetaVarKind::Path
+                    | MetaVarKind::Vis => false,
+                    MetaVarKind::Lifetime | MetaVarKind::Ident | MetaVarKind::TT => {
+                        unreachable!()
+                    }
+                },
+                _ => false,
+            },
+            NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
+                token::PathSep | token::Ident(..) | token::NtIdent(..) => true,
+                token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => may_be_ident(*kind),
+                _ => false,
+            },
+            NonterminalKind::Pat(pat_kind) => token.can_begin_pattern(pat_kind),
+            NonterminalKind::Lifetime => match &token.kind {
+                token::Lifetime(..) | token::NtLifetime(..) => true,
+                _ => false,
+            },
+            NonterminalKind::TT | NonterminalKind::Item | NonterminalKind::Stmt => {
+                token.kind.close_delim().is_none()
+            }
+        }
+    }
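+
+    // A small illustration of why this check matters (hypothetical macro, not from this
+    // crate): given `macro_rules! m { ($e:expr) => {}; ($i:item) => {}; }`, an input
+    // starting with `struct` never attempts the `$e:expr` arm, because `struct` cannot
+    // begin an expression, while the `$i:item` arm remains a candidate.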
+
+    /// Parse a non-terminal (e.g. MBE `:pat` or `:ident`). Inlined because there is only one call
+    /// site.
+    #[inline]
+    pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, ParseNtResult> {
+        // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro,
+        // which requires having captured tokens available. Since we cannot determine
+        // in advance whether or not a proc-macro will be (transitively) invoked,
+        // we always capture tokens for any nonterminal that needs them.
+        match kind {
+            // Note that TT is treated differently to all the others.
+            NonterminalKind::TT => Ok(ParseNtResult::Tt(self.parse_token_tree())),
+            NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
+                Some(item) => Ok(ParseNtResult::Item(item)),
+                None => Err(self.dcx().create_err(UnexpectedNonterminal::Item(self.token.span))),
+            },
+            NonterminalKind::Block => {
+                // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
+                // the ':block' matcher does not support them
+                Ok(ParseNtResult::Block(self.collect_tokens_no_attrs(|this| this.parse_block())?))
+            }
+            NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
+                Some(stmt) => Ok(ParseNtResult::Stmt(P(stmt))),
+                None => {
+                    Err(self.dcx().create_err(UnexpectedNonterminal::Statement(self.token.span)))
+                }
+            },
+            NonterminalKind::Pat(pat_kind) => Ok(ParseNtResult::Pat(
+                self.collect_tokens_no_attrs(|this| match pat_kind {
+                    PatParam { .. } => this.parse_pat_no_top_alt(None, None),
+                    PatWithOr => this.parse_pat_no_top_guard(
+                        None,
+                        RecoverComma::No,
+                        RecoverColon::No,
+                        CommaRecoveryMode::EitherTupleOrPipe,
+                    ),
+                })?,
+                pat_kind,
+            )),
+            NonterminalKind::Expr(expr_kind) => {
+                Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind))
+            }
+            NonterminalKind::Literal => {
+                // The `:literal` matcher does not support attributes.
+                Ok(ParseNtResult::Literal(
+                    self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?,
+                ))
+            }
+            NonterminalKind::Ty => Ok(ParseNtResult::Ty(
+                self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?,
+            )),
+            // This could be handled like a token, since it is one.
+            NonterminalKind::Ident => {
+                if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
+                    self.bump();
+                    Ok(ParseNtResult::Ident(ident, is_raw))
+                } else {
+                    Err(self.dcx().create_err(UnexpectedNonterminal::Ident {
+                        span: self.token.span,
+                        token: self.token,
+                    }))
+                }
+            }
+            NonterminalKind::Path => Ok(ParseNtResult::Path(P(
+                self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?
+            ))),
+            NonterminalKind::Meta => {
+                Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?)))
+            }
+            NonterminalKind::Vis => {
+                Ok(ParseNtResult::Vis(P(self
+                    .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?)))
+            }
+            NonterminalKind::Lifetime => {
+                // We want to keep `'keyword` parsing, just like `keyword` is still
+                // an ident for nonterminal purposes.
+                if let Some((ident, is_raw)) = self.token.lifetime() {
+                    self.bump();
+                    Ok(ParseNtResult::Lifetime(ident, is_raw))
+                } else {
+                    Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime {
+                        span: self.token.span,
+                        token: self.token,
+                    }))
+                }
+            }
+        }
+    }
+}
+
+/// The token is an identifier, but not `_`.
+/// We prohibit passing `_` to macros expecting `ident` for now.
+fn get_macro_ident(token: &Token) -> Option<(Ident, token::IdentIsRaw)> {
+    token.ident().filter(|(ident, _)| ident.name != kw::Underscore)
+}
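+
+// For example (illustrative): with `macro_rules! m { ($i:ident) => {}; }`, calling
+// `m!(foo)` matches, but `m!(_)` does not, because `get_macro_ident` filters out `_`.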
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
new file mode 100644
index 00000000000..64653ee2a04
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -0,0 +1,1759 @@
+use std::ops::Bound;
+
+use rustc_ast::mut_visit::{self, MutVisitor};
+use rustc_ast::ptr::P;
+use rustc_ast::token::NtPatKind::*;
+use rustc_ast::token::{self, IdentIsRaw, MetaVarKind, Token};
+use rustc_ast::util::parser::ExprPrecedence;
+use rustc_ast::visit::{self, Visitor};
+use rustc_ast::{
+    self as ast, Arm, AttrVec, BindingMode, ByRef, Expr, ExprKind, LocalKind, MacCall, Mutability,
+    Pat, PatField, PatFieldsRest, PatKind, Path, QSelf, RangeEnd, RangeSyntax, Stmt, StmtKind,
+};
+use rustc_ast_pretty::pprust;
+use rustc_errors::{Applicability, Diag, DiagArgValue, PResult, StashKey};
+use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_span::source_map::{Spanned, respan};
+use rustc_span::{BytePos, ErrorGuaranteed, Ident, Span, kw, sym};
+use thin_vec::{ThinVec, thin_vec};
+
+use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, UsePreAttrPos};
+use crate::errors::{
+    self, AmbiguousRangePattern, AtDotDotInStructPattern, AtInStructPattern,
+    DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed, DotDotDotRestPattern,
+    EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt, ExpectedCommaAfterPatternField,
+    GenericArgsInPatRequireTurbofishSyntax, InclusiveRangeExtraEquals, InclusiveRangeMatchArrow,
+    InclusiveRangeNoEnd, InvalidMutInPattern, ParenRangeSuggestion, PatternOnWrongSideOfAt,
+    RemoveLet, RepeatedMutInPattern, SwitchRefBoxOrder, TopLevelOrPatternNotAllowed,
+    TopLevelOrPatternNotAllowedSugg, TrailingVertNotAllowed, UnexpectedExpressionInPattern,
+    UnexpectedExpressionInPatternSugg, UnexpectedLifetimeInPattern, UnexpectedParenInRangePat,
+    UnexpectedParenInRangePatSugg, UnexpectedVertVertBeforeFunctionParam,
+    UnexpectedVertVertInPattern, WrapInParens,
+};
+use crate::parser::expr::{DestructuredFloat, could_be_unclosed_char_literal};
+use crate::{exp, maybe_recover_from_interpolated_ty_qpath};
+
+#[derive(PartialEq, Copy, Clone)]
+pub enum Expected {
+    ParameterName,
+    ArgumentName,
+    Identifier,
+    BindingPattern,
+}
+
+impl Expected {
+    // FIXME(#100717): migrate users of this to proper localization
+    fn to_string_or_fallback(expected: Option<Expected>) -> &'static str {
+        match expected {
+            Some(Expected::ParameterName) => "parameter name",
+            Some(Expected::ArgumentName) => "argument name",
+            Some(Expected::Identifier) => "identifier",
+            Some(Expected::BindingPattern) => "binding pattern",
+            None => "pattern",
+        }
+    }
+}
+
+const WHILE_PARSING_OR_MSG: &str = "while parsing this or-pattern starting here";
+
+/// Whether or not to recover a `,` when parsing or-patterns.
+#[derive(PartialEq, Copy, Clone)]
+pub enum RecoverComma {
+    Yes,
+    No,
+}
+
+/// Whether or not to recover a `:` when parsing patterns that were meant to be paths.
+#[derive(PartialEq, Copy, Clone)]
+pub enum RecoverColon {
+    Yes,
+    No,
+}
+
+/// When recovering from `a, b` in a pattern, whether to suggest only the tuple `(a, b)` or
+/// both the tuple *and* the or-pattern `a | b`.
+#[derive(PartialEq, Copy, Clone)]
+pub enum CommaRecoveryMode {
+    LikelyTuple,
+    EitherTupleOrPipe,
+}
+
+/// The result of `eat_or_separator`. We want to distinguish which case we are in to avoid
+/// emitting duplicate diagnostics.
+#[derive(Debug, Clone, Copy)]
+enum EatOrResult {
+    /// We recovered from a trailing vert.
+    TrailingVert,
+    /// We ate an `|` (or `||` and recovered).
+    AteOr,
+    /// We did not eat anything (i.e. the current token is not `|` or `||`).
+    None,
+}
+
+/// The syntax location of a given pattern. Used for diagnostics.
+#[derive(Clone, Copy)]
+pub enum PatternLocation {
+    LetBinding,
+    FunctionParameter,
+}
+
+impl<'a> Parser<'a> {
+    /// Parses a pattern.
+    ///
+    /// Corresponds to `Pattern` in RFC 3637 and admits guard patterns at the top level.
+    /// Used when parsing patterns in all cases where neither `PatternNoTopGuard` nor
+    /// `PatternNoTopAlt` (see below) are used.
+    pub fn parse_pat_allow_top_guard(
+        &mut self,
+        expected: Option<Expected>,
+        rc: RecoverComma,
+        ra: RecoverColon,
+        rt: CommaRecoveryMode,
+    ) -> PResult<'a, P<Pat>> {
+        let pat = self.parse_pat_no_top_guard(expected, rc, ra, rt)?;
+
+        if self.eat_keyword(exp!(If)) {
+            let cond = self.parse_expr()?;
+            // Feature-gate guard patterns
+            self.psess.gated_spans.gate(sym::guard_patterns, cond.span);
+            let span = pat.span.to(cond.span);
+            Ok(self.mk_pat(span, PatKind::Guard(pat, cond)))
+        } else {
+            Ok(pat)
+        }
+    }
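+
+    // A sketch of the surface syntax this enables (RFC 3637 guard patterns, behind the
+    // unstable `guard_patterns` feature gated above):
+    //
+    //     match compute() {
+    //         Ok(x if x > 0) => { .. }
+    //         _ => { .. }
+    //     }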
+
+    /// Parses a pattern.
+    ///
+    /// Corresponds to `PatternNoTopAlt` in RFC 3637 and does not admit or-patterns
+    /// or guard patterns at the top level. Used when parsing the parameters of lambda
+    /// expressions, functions, function pointers, and `pat_param` macro fragments.
+    pub fn parse_pat_no_top_alt(
+        &mut self,
+        expected: Option<Expected>,
+        syntax_loc: Option<PatternLocation>,
+    ) -> PResult<'a, P<Pat>> {
+        self.parse_pat_with_range_pat(true, expected, syntax_loc)
+    }
+
+    /// Parses a pattern.
+    ///
+    /// Corresponds to `PatternNoTopGuard` in RFC 3637 and allows or-patterns, but not
+    /// guard patterns, at the top level. Used for parsing patterns in `pat` fragments (until
+    /// the next edition) and `let`, `if let`, and `while let` expressions.
+    ///
+    /// Note that after the FCP in <https://github.com/rust-lang/rust/issues/81415>,
+    /// a leading vert is allowed in nested or-patterns, too. This allows us to
+    /// simplify the grammar somewhat.
+    pub fn parse_pat_no_top_guard(
+        &mut self,
+        expected: Option<Expected>,
+        rc: RecoverComma,
+        ra: RecoverColon,
+        rt: CommaRecoveryMode,
+    ) -> PResult<'a, P<Pat>> {
+        self.parse_pat_no_top_guard_inner(expected, rc, ra, rt, None).map(|(pat, _)| pat)
+    }
+
+    /// Returns the pattern and a bool indicating whether we recovered from a trailing vert (true =
+    /// recovered).
+    fn parse_pat_no_top_guard_inner(
+        &mut self,
+        expected: Option<Expected>,
+        rc: RecoverComma,
+        ra: RecoverColon,
+        rt: CommaRecoveryMode,
+        syntax_loc: Option<PatternLocation>,
+    ) -> PResult<'a, (P<Pat>, bool)> {
+        // Keep track of whether we recovered from a trailing vert so that we can avoid duplicated
+        // suggestions (which bothers rustfix).
+        //
+        // Allow a '|' before the pats (RFCs 1925, 2530, and 2535).
+        let (leading_vert_span, mut trailing_vert) = match self.eat_or_separator(None) {
+            EatOrResult::AteOr => (Some(self.prev_token.span), false),
+            EatOrResult::TrailingVert => (None, true),
+            EatOrResult::None => (None, false),
+        };
+
+        // Parse the first pattern (`p_0`).
+        let mut first_pat = match self.parse_pat_no_top_alt(expected, syntax_loc) {
+            Ok(pat) => pat,
+            Err(err)
+                if self.token.is_reserved_ident()
+                    && !self.token.is_keyword(kw::In)
+                    && !self.token.is_keyword(kw::If) =>
+            {
+                err.emit();
+                self.bump();
+                self.mk_pat(self.token.span, PatKind::Wild)
+            }
+            Err(err) => return Err(err),
+        };
+        if rc == RecoverComma::Yes && !first_pat.could_be_never_pattern() {
+            self.maybe_recover_unexpected_comma(first_pat.span, rt)?;
+        }
+
+        // If the next token is not a `|`,
+        // this is not an or-pattern and we should exit here.
+        if !self.check(exp!(Or)) && self.token != token::OrOr {
+            // If we parsed a leading `|` which should be gated,
+            // then we should really gate the leading `|`.
+            // This complicated procedure is done purely for diagnostics UX.
+
+            // Check if the user wrote `foo:bar` instead of `foo::bar`.
+            if ra == RecoverColon::Yes {
+                first_pat = self.maybe_recover_colon_colon_in_pat_typo(first_pat, expected);
+            }
+
+            if let Some(leading_vert_span) = leading_vert_span {
+                // If there was a leading vert, treat this as an or-pattern. This improves
+                // diagnostics.
+                let span = leading_vert_span.to(self.prev_token.span);
+                return Ok((self.mk_pat(span, PatKind::Or(thin_vec![first_pat])), trailing_vert));
+            }
+
+            return Ok((first_pat, trailing_vert));
+        }
+
+        // Parse the patterns `p_1 | ... | p_n` where `n > 0`.
+        let lo = leading_vert_span.unwrap_or(first_pat.span);
+        let mut pats = thin_vec![first_pat];
+        loop {
+            match self.eat_or_separator(Some(lo)) {
+                EatOrResult::AteOr => {}
+                EatOrResult::None => break,
+                EatOrResult::TrailingVert => {
+                    trailing_vert = true;
+                    break;
+                }
+            }
+            let pat = self.parse_pat_no_top_alt(expected, syntax_loc).map_err(|mut err| {
+                err.span_label(lo, WHILE_PARSING_OR_MSG);
+                err
+            })?;
+            if rc == RecoverComma::Yes && !pat.could_be_never_pattern() {
+                self.maybe_recover_unexpected_comma(pat.span, rt)?;
+            }
+            pats.push(pat);
+        }
+        let or_pattern_span = lo.to(self.prev_token.span);
+
+        Ok((self.mk_pat(or_pattern_span, PatKind::Or(pats)), trailing_vert))
+    }
+
+    /// Parse a pattern and (maybe) a `Colon` in positions where a pattern may be followed by a
+    /// type annotation (e.g. for `let` bindings or `fn` params).
+    ///
+    /// Generally, this corresponds to `pat_no_top_alt` followed by an optional `Colon`. It will
+    /// eat the `Colon` token if one is present.
+    ///
+    /// The return value represents the parsed pattern and `true` if a `Colon` was parsed (`false`
+    /// otherwise).
+    pub(super) fn parse_pat_before_ty(
+        &mut self,
+        expected: Option<Expected>,
+        rc: RecoverComma,
+        syntax_loc: PatternLocation,
+    ) -> PResult<'a, (P<Pat>, bool)> {
+        // We parse with top-level or-patterns allowed regardless of whether we actually want
+        // them, so that we can detect when a user tries to use one and print a better error
+        // message.
+        let (pat, trailing_vert) = self.parse_pat_no_top_guard_inner(
+            expected,
+            rc,
+            RecoverColon::No,
+            CommaRecoveryMode::LikelyTuple,
+            Some(syntax_loc),
+        )?;
+        let colon = self.eat(exp!(Colon));
+
+        if let PatKind::Or(pats) = &pat.kind {
+            let span = pat.span;
+            let sub = if pats.len() == 1 {
+                Some(TopLevelOrPatternNotAllowedSugg::RemoveLeadingVert {
+                    span: span.with_hi(span.lo() + BytePos(1)),
+                })
+            } else {
+                Some(TopLevelOrPatternNotAllowedSugg::WrapInParens {
+                    span,
+                    suggestion: WrapInParens { lo: span.shrink_to_lo(), hi: span.shrink_to_hi() },
+                })
+            };
+
+            let err = self.dcx().create_err(match syntax_loc {
+                PatternLocation::LetBinding => {
+                    TopLevelOrPatternNotAllowed::LetBinding { span, sub }
+                }
+                PatternLocation::FunctionParameter => {
+                    TopLevelOrPatternNotAllowed::FunctionParameter { span, sub }
+                }
+            });
+            if trailing_vert {
+                err.delay_as_bug();
+            } else {
+                err.emit();
+            }
+        }
+
+        Ok((pat, colon))
+    }
+
+    /// Parse the pattern for a function or function pointer parameter, followed by a colon.
+    ///
+    /// The return value represents the parsed pattern and `true` if a `Colon` was parsed (`false`
+    /// otherwise).
+    pub(super) fn parse_fn_param_pat_colon(&mut self) -> PResult<'a, (P<Pat>, bool)> {
+        // In order to get good UX, we first recover in the case of a leading vert for an illegal
+        // top-level or-pat. Normally, this means recovering both `|` and `||`, but in this case,
+        // a leading `||` probably doesn't indicate an or-pattern attempt, so we handle that
+        // separately.
+        if let token::OrOr = self.token.kind {
+            self.dcx().emit_err(UnexpectedVertVertBeforeFunctionParam { span: self.token.span });
+            self.bump();
+        }
+
+        self.parse_pat_before_ty(
+            Some(Expected::ParameterName),
+            RecoverComma::No,
+            PatternLocation::FunctionParameter,
+        )
+    }
+
+    /// Eat the or-pattern `|` separator.
+    /// If instead a `||` token is encountered, recover and pretend we parsed `|`.
+    fn eat_or_separator(&mut self, lo: Option<Span>) -> EatOrResult {
+        if self.recover_trailing_vert(lo) {
+            EatOrResult::TrailingVert
+        } else if self.token.kind == token::OrOr {
+            // Found `||`; Recover and pretend we parsed `|`.
+            self.dcx().emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
+            self.bump();
+            EatOrResult::AteOr
+        } else if self.eat(exp!(Or)) {
+            EatOrResult::AteOr
+        } else {
+            EatOrResult::None
+        }
+    }
+
+    /// Recover if `|` or `||` is the current token and we have one of the
+    /// tokens `=>`, `if`, `=`, `:`, `;`, `,`, `]`, `)`, or `}` ahead of us.
+    ///
+    /// These tokens all indicate that we reached the end of the or-pattern
+    /// list and can now reliably say that the `|` was an illegal trailing vert.
+    /// Note that there are more tokens such as `@` for which we know that the `|`
+    /// is an illegal parse. However, the user's intent is less clear in that case.
+    fn recover_trailing_vert(&mut self, lo: Option<Span>) -> bool {
+        let is_end_ahead = self.look_ahead(1, |token| {
+            matches!(
+                &token.uninterpolate().kind,
+                token::FatArrow // e.g. `a | => 0,`.
+                | token::Ident(kw::If, token::IdentIsRaw::No) // e.g. `a | if expr`.
+                | token::Eq // e.g. `let a | = 0`.
+                | token::Semi // e.g. `let a |;`.
+                | token::Colon // e.g. `let a | :`.
+                | token::Comma // e.g. `let (a |,)`.
+                | token::CloseBracket // e.g. `let [a | ]`.
+                | token::CloseParen // e.g. `let (a | )`.
+                | token::CloseBrace // e.g. `let A { f: a | }`.
+            )
+        });
+        match (is_end_ahead, &self.token.kind) {
+            (true, token::Or | token::OrOr) => {
+                // A `|` or possibly `||` token shouldn't be here. Ban it.
+                self.dcx().emit_err(TrailingVertNotAllowed {
+                    span: self.token.span,
+                    start: lo,
+                    token: self.token,
+                    note_double_vert: self.token.kind == token::OrOr,
+                });
+                self.bump();
+                true
+            }
+            _ => false,
+        }
+    }
+
+    /// Ensures that the last parsed pattern (or pattern range bound) is not followed by an expression.
+    ///
+    /// `is_end_bound` indicates whether the last parsed thing was the end bound of a range pattern (see [`parse_pat_range_end`](Self::parse_pat_range_end))
+    /// in order to say "expected a pattern range bound" instead of "expected a pattern";
+    /// ```text
+    /// 0..=1 + 2
+    ///     ^^^^^
+    /// ```
+    /// Only the end bound is spanned in this case, and this function has no idea if there was a `..=` before `pat_span`, hence the parameter.
+    ///
+    /// This function returns `Some` if a trailing expression was recovered, and said expression's span.
+    #[must_use = "the pattern must be discarded as `PatKind::Err` if this function returns Some"]
+    fn maybe_recover_trailing_expr(
+        &mut self,
+        pat_span: Span,
+        is_end_bound: bool,
+    ) -> Option<(ErrorGuaranteed, Span)> {
+        if self.prev_token.is_keyword(kw::Underscore) || !self.may_recover() {
+            // Don't recover anything after an `_` or if recovery is disabled.
+            return None;
+        }
+
+        // Returns `true` iff `token` is an unsuffixed integer.
+        let is_one_tuple_index = |_: &Self, token: &Token| -> bool {
+            use token::{Lit, LitKind};
+
+            matches!(
+                token.kind,
+                token::Literal(Lit { kind: LitKind::Integer, symbol: _, suffix: None })
+            )
+        };
+
+        // Returns `true` iff `token` is an unsuffixed `x.y` float.
+        let is_two_tuple_indexes = |this: &Self, token: &Token| -> bool {
+            use token::{Lit, LitKind};
+
+            if let token::Literal(Lit { kind: LitKind::Float, symbol, suffix: None }) = token.kind
+                && let DestructuredFloat::MiddleDot(..) = this.break_up_float(symbol, token.span)
+            {
+                true
+            } else {
+                false
+            }
+        };
+
+        // Check for `.hello` or `.0`.
+        let has_dot_expr = self.check_noexpect(&token::Dot) // `.`
+            && self.look_ahead(1, |tok| {
+                tok.is_ident() // `hello`
+                || is_one_tuple_index(&self, &tok) // `0`
+                || is_two_tuple_indexes(&self, &tok) // `0.0`
+            });
+
+        // Check for operators.
+        // `|` is excluded as it is used in pattern alternatives and lambdas,
+        // `?` is included for error propagation,
+        // `[` is included for indexing operations,
+        // `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. `tests/ui/parser/pat-lt-bracket-7.rs`),
+        // `as` is included for type casts
+        let has_trailing_operator = matches!(
+                self.token.kind,
+                token::Plus | token::Minus | token::Star | token::Slash | token::Percent
+                | token::Caret | token::And | token::Shl | token::Shr // excludes `Or`
+            )
+            || self.token == token::Question
+            || (self.token == token::OpenBracket
+                && self.look_ahead(1, |t| *t != token::CloseBracket)) // excludes `[]`
+            || self.token.is_keyword(kw::As);
+
+        if !has_dot_expr && !has_trailing_operator {
+            // Nothing to recover here.
+            return None;
+        }
+
+        // Let's try to parse an expression to emit a better diagnostic.
+        let mut snapshot = self.create_snapshot_for_diagnostic();
+        snapshot.restrictions.insert(Restrictions::IS_PAT);
+
+        // Parse `?`, `.f`, `(arg0, arg1, ...)` or `[expr]` until they've all been eaten.
+        let Ok(expr) = snapshot
+            .parse_expr_dot_or_call_with(
+                AttrVec::new(),
+                self.mk_expr(pat_span, ExprKind::Dummy), // equivalent to transforming the parsed pattern into an `Expr`
+                pat_span,
+            )
+            .map_err(|err| err.cancel())
+        else {
+            // We got a trailing method/operator, but that wasn't an expression.
+            return None;
+        };
+
+        // Parse an associative expression such as `+ expr`, `% expr`, ...
+        // Assignments, ranges and `|` are disabled by [`Restrictions::IS_PAT`].
+        let Ok((expr, _)) = snapshot
+            .parse_expr_assoc_rest_with(Bound::Unbounded, false, expr)
+            .map_err(|err| err.cancel())
+        else {
+            // We got a trailing method/operator, but that wasn't an expression.
+            return None;
+        };
+
+        // We got a valid expression.
+        self.restore_snapshot(snapshot);
+        self.restrictions.remove(Restrictions::IS_PAT);
+
+        let is_bound = is_end_bound
+            // is_start_bound: either `..` or `)..`
+            || self.token.is_range_separator()
+            || self.token == token::CloseParen
+                && self.look_ahead(1, Token::is_range_separator);
+
+        let span = expr.span;
+
+        Some((
+            self.dcx()
+                .create_err(UnexpectedExpressionInPattern {
+                    span,
+                    is_bound,
+                    expr_precedence: expr.precedence(),
+                })
+                .stash(span, StashKey::ExprInPat)
+                .unwrap(),
+            span,
+        ))
+    }
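+
+    // Example inputs this recovery targets (illustrative): `match x { 1 + 2 => {} }` or
+    // `let Some(1 + 2) = y;`, where `1 + 2` is an expression written in pattern position;
+    // per the `#[must_use]` note, callers then discard the pattern as `PatKind::Err`.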
+
+    /// Called by [`Parser::parse_stmt_without_recovery`], used to add statement-aware subdiagnostics to the errors stashed
+    /// by [`Parser::maybe_recover_trailing_expr`].
+    pub(super) fn maybe_augment_stashed_expr_in_pats_with_suggestions(&mut self, stmt: &Stmt) {
+        if self.dcx().has_errors().is_none() {
+            // No need to walk the statement if there's no stashed errors.
+            return;
+        }
+
+        struct PatVisitor<'a> {
+            /// `self`
+            parser: &'a Parser<'a>,
+            /// The freshly-parsed statement.
+            stmt: &'a Stmt,
+            /// The current match arm (for arm guard suggestions).
+            arm: Option<&'a Arm>,
+            /// The current struct field (for variable name suggestions).
+            field: Option<&'a PatField>,
+        }
+
+        impl<'a> PatVisitor<'a> {
+            /// Looks for stashed [`StashKey::ExprInPat`] errors in `stash_span`, and emits them with suggestions.
+            /// `stash_span` is contained in `expr_span`, the latter being larger in borrow patterns;
+            /// ```txt
+            /// &mut x.y
+            /// -----^^^ `stash_span`
+            /// |
+            /// `expr_span`
+            /// ```
+            /// `is_range_bound` is used to exclude arm guard suggestions in range pattern bounds.
+            fn maybe_add_suggestions_then_emit(
+                &self,
+                stash_span: Span,
+                expr_span: Span,
+                is_range_bound: bool,
+            ) {
+                self.parser.dcx().try_steal_modify_and_emit_err(
+                    stash_span,
+                    StashKey::ExprInPat,
+                    |err| {
+                        // Includes pre-pats (e.g. `&mut <err>`) in the diagnostic.
+                        err.span.replace(stash_span, expr_span);
+
+                        let sm = self.parser.psess.source_map();
+                        let stmt = self.stmt;
+                        let line_lo = sm.span_extend_to_line(stmt.span).shrink_to_lo();
+                        let indentation = sm.indentation_before(stmt.span).unwrap_or_default();
+                        let Ok(expr) = self.parser.span_to_snippet(expr_span) else {
+                            // FIXME: some suggestions don't actually need the snippet; see PR #123877's unresolved conversations.
+                            return;
+                        };
+
+                        if let StmtKind::Let(local) = &stmt.kind {
+                            match &local.kind {
+                                LocalKind::Decl | LocalKind::Init(_) => {
+                                    // It's kinda hard to guess what the user intended, so don't make suggestions.
+                                    return;
+                                }
+
+                                LocalKind::InitElse(_, _) => {}
+                            }
+                        }
+
+                        // help: use an arm guard `if val == expr`
+                        // FIXME(guard_patterns): suggest this regardless of a match arm.
+                        if let Some(arm) = &self.arm
+                            && !is_range_bound
+                        {
+                            let (ident, ident_span) = match self.field {
+                                Some(field) => {
+                                    (field.ident.to_string(), field.ident.span.to(expr_span))
+                                }
+                                None => ("val".to_owned(), expr_span),
+                            };
+
+                            // Are parentheses required around `expr`?
+                            // HACK: a neater way would be preferable.
+                            let expr = match &err.args["expr_precedence"] {
+                                DiagArgValue::Number(expr_precedence) => {
+                                    if *expr_precedence <= ExprPrecedence::Compare as i32 {
+                                        format!("({expr})")
+                                    } else {
+                                        format!("{expr}")
+                                    }
+                                }
+                                _ => unreachable!(),
+                            };
+
+                            match &arm.guard {
+                                None => {
+                                    err.subdiagnostic(
+                                        UnexpectedExpressionInPatternSugg::CreateGuard {
+                                            ident_span,
+                                            pat_hi: arm.pat.span.shrink_to_hi(),
+                                            ident,
+                                            expr,
+                                        },
+                                    );
+                                }
+                                Some(guard) => {
+                                    // Are parentheses required around the old guard?
+                                    let wrap_guard = guard.precedence() <= ExprPrecedence::LAnd;
+
+                                    err.subdiagnostic(
+                                        UnexpectedExpressionInPatternSugg::UpdateGuard {
+                                            ident_span,
+                                            guard_lo: if wrap_guard {
+                                                Some(guard.span.shrink_to_lo())
+                                            } else {
+                                                None
+                                            },
+                                            guard_hi: guard.span.shrink_to_hi(),
+                                            guard_hi_paren: if wrap_guard { ")" } else { "" },
+                                            ident,
+                                            expr,
+                                        },
+                                    );
+                                }
+                            }
+                        }
+
+                        // help: extract the expr into a `const VAL: _ = expr`
+                        let ident = match self.field {
+                            Some(field) => field.ident.as_str().to_uppercase(),
+                            None => "VAL".to_owned(),
+                        };
+                        err.subdiagnostic(UnexpectedExpressionInPatternSugg::Const {
+                            stmt_lo: line_lo,
+                            ident_span: expr_span,
+                            expr,
+                            ident,
+                            indentation,
+                        });
+                    },
+                );
+            }
+        }
+
+        impl<'a> Visitor<'a> for PatVisitor<'a> {
+            fn visit_arm(&mut self, a: &'a Arm) -> Self::Result {
+                self.arm = Some(a);
+                visit::walk_arm(self, a);
+                self.arm = None;
+            }
+
+            fn visit_pat_field(&mut self, fp: &'a PatField) -> Self::Result {
+                self.field = Some(fp);
+                visit::walk_pat_field(self, fp);
+                self.field = None;
+            }
+
+            fn visit_pat(&mut self, p: &'a Pat) -> Self::Result {
+                match &p.kind {
+                    // Base expression
+                    PatKind::Err(_) | PatKind::Expr(_) => {
+                        self.maybe_add_suggestions_then_emit(p.span, p.span, false)
+                    }
+
+                    // Sub-patterns
+                    // FIXME: this doesn't work with recursive subpats (`&mut &mut <err>`)
+                    PatKind::Box(subpat) | PatKind::Ref(subpat, _)
+                        if matches!(subpat.kind, PatKind::Err(_) | PatKind::Expr(_)) =>
+                    {
+                        self.maybe_add_suggestions_then_emit(subpat.span, p.span, false)
+                    }
+
+                    // Sub-expressions
+                    PatKind::Range(start, end, _) => {
+                        if let Some(start) = start {
+                            self.maybe_add_suggestions_then_emit(start.span, start.span, true);
+                        }
+
+                        if let Some(end) = end {
+                            self.maybe_add_suggestions_then_emit(end.span, end.span, true);
+                        }
+                    }
+
+                    // Walk continuation
+                    _ => visit::walk_pat(self, p),
+                }
+            }
+        }
+
+        // Starts the visit.
+        PatVisitor { parser: self, stmt, arm: None, field: None }.visit_stmt(stmt);
+    }
+
+    fn eat_metavar_pat(&mut self) -> Option<P<Pat>> {
+        // Must try both kinds of pattern nonterminals.
+        if let Some(pat) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Pat(PatParam { .. })),
+            |this| this.parse_pat_no_top_alt(None, None),
+        ) {
+            Some(pat)
+        } else if let Some(pat) = self.eat_metavar_seq(MetaVarKind::Pat(PatWithOr), |this| {
+            this.parse_pat_no_top_guard(
+                None,
+                RecoverComma::No,
+                RecoverColon::No,
+                CommaRecoveryMode::EitherTupleOrPipe,
+            )
+        }) {
+            Some(pat)
+        } else {
+            None
+        }
+    }
+
+    /// Parses a pattern, with a setting for whether modern range patterns (e.g., `a..=b`,
+    /// `a..b`) are allowed.
+    fn parse_pat_with_range_pat(
+        &mut self,
+        allow_range_pat: bool,
+        expected: Option<Expected>,
+        syntax_loc: Option<PatternLocation>,
+    ) -> PResult<'a, P<Pat>> {
+        maybe_recover_from_interpolated_ty_qpath!(self, true);
+
+        if let Some(pat) = self.eat_metavar_pat() {
+            return Ok(pat);
+        }
+
+        let mut lo = self.token.span;
+
+        if self.token.is_keyword(kw::Let)
+            && self.look_ahead(1, |tok| {
+                tok.can_begin_pattern(token::NtPatKind::PatParam { inferred: false })
+            })
+        {
+            self.bump();
+            // Trim extra space after the `let`
+            let span = lo.with_hi(self.token.span.lo());
+            self.dcx().emit_err(RemoveLet { span: lo, suggestion: span });
+            lo = self.token.span;
+        }
+
+        let pat = if self.check(exp!(And)) || self.token == token::AndAnd {
+            self.parse_pat_deref(expected)?
+        } else if self.check(exp!(OpenParen)) {
+            self.parse_pat_tuple_or_parens()?
+        } else if self.check(exp!(OpenBracket)) {
+            // Parse `[pat, pat,...]` as a slice pattern.
+            let (pats, _) =
+                self.parse_delim_comma_seq(exp!(OpenBracket), exp!(CloseBracket), |p| {
+                    p.parse_pat_allow_top_guard(
+                        None,
+                        RecoverComma::No,
+                        RecoverColon::No,
+                        CommaRecoveryMode::EitherTupleOrPipe,
+                    )
+                })?;
+            PatKind::Slice(pats)
+        } else if self.check(exp!(DotDot)) && !self.is_pat_range_end_start(1) {
+            // A rest pattern `..`.
+            self.bump(); // `..`
+            PatKind::Rest
+        } else if self.check(exp!(DotDotDot)) && !self.is_pat_range_end_start(1) {
+            self.recover_dotdotdot_rest_pat(lo)
+        } else if let Some(form) = self.parse_range_end() {
+            self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
+        } else if self.eat(exp!(Bang)) {
+            // Parse `!`
+            self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
+            PatKind::Never
+        } else if self.eat_keyword(exp!(Underscore)) {
+            // Parse `_`
+            PatKind::Wild
+        } else if self.eat_keyword(exp!(Mut)) {
+            self.parse_pat_ident_mut()?
+        } else if self.eat_keyword(exp!(Ref)) {
+            if self.check_keyword(exp!(Box)) {
+                // Suggest `box ref`.
+                let span = self.prev_token.span.to(self.token.span);
+                self.bump();
+                self.dcx().emit_err(SwitchRefBoxOrder { span });
+            }
+            // Parse ref ident @ pat / ref mut ident @ pat
+            let mutbl = self.parse_mutability();
+            self.parse_pat_ident(BindingMode(ByRef::Yes(mutbl), Mutability::Not), syntax_loc)?
+        } else if self.eat_keyword(exp!(Box)) {
+            self.parse_pat_box()?
+        } else if self.check_inline_const(0) {
+            // Parse `const pat`
+            let const_expr = self.parse_const_block(lo.to(self.token.span), true)?;
+
+            if let Some(re) = self.parse_range_end() {
+                self.parse_pat_range_begin_with(const_expr, re)?
+            } else {
+                PatKind::Expr(const_expr)
+            }
+        } else if self.is_builtin() {
+            self.parse_pat_builtin()?
+        }
+        // Don't eagerly error on semantically invalid tokens when matching
+        // declarative macros, as the input to those doesn't have to be
+        // semantically valid. For attribute/derive proc macros this is not the
+        // case, so doing the recovery for them is fine.
+        else if self.can_be_ident_pat()
+            || (self.is_lit_bad_ident().is_some() && self.may_recover())
+        {
+            // Parse `ident @ pat`
+            // This can give false positives and parse nullary enums,
+            // they are dealt with later in resolve.
+            self.parse_pat_ident(BindingMode::NONE, syntax_loc)?
+        } else if self.is_start_of_pat_with_path() {
+            // Parse pattern starting with a path
+            let (qself, path) = if self.eat_lt() {
+                // Parse a qualified path
+                let (qself, path) = self.parse_qpath(PathStyle::Pat)?;
+                (Some(qself), path)
+            } else {
+                // Parse an unqualified path
+                (None, self.parse_path(PathStyle::Pat)?)
+            };
+            let span = lo.to(self.prev_token.span);
+
+            if qself.is_none() && self.check(exp!(Bang)) {
+                self.parse_pat_mac_invoc(path)?
+            } else if let Some(form) = self.parse_range_end() {
+                let begin = self.mk_expr(span, ExprKind::Path(qself, path));
+                self.parse_pat_range_begin_with(begin, form)?
+            } else if self.check(exp!(OpenBrace)) {
+                self.parse_pat_struct(qself, path)?
+            } else if self.check(exp!(OpenParen)) {
+                self.parse_pat_tuple_struct(qself, path)?
+            } else {
+                match self.maybe_recover_trailing_expr(span, false) {
+                    Some((guar, _)) => PatKind::Err(guar),
+                    None => PatKind::Path(qself, path),
+                }
+            }
+        } else if let Some((lt, IdentIsRaw::No)) = self.token.lifetime()
+            // In pattern position, we're totally fine with using "next token isn't colon"
+            // as a heuristic. We could probably just always try to recover if it's a lifetime,
+            // because we never have `'a: label {}` in a pattern position anyway, but it does
+            // keep us from suggesting something like `let 'a: Ty = ..` => `let 'a': Ty = ..`
+            && could_be_unclosed_char_literal(lt)
+            && !self.look_ahead(1, |token| token.kind == token::Colon)
+        {
+            // Recover a `'a` as a `'a'` literal
+            let lt = self.expect_lifetime();
+            let (lit, _) =
+                self.recover_unclosed_char(lt.ident, Parser::mk_token_lit_char, |self_| {
+                    let expected = Expected::to_string_or_fallback(expected);
+                    let msg = format!(
+                        "expected {}, found {}",
+                        expected,
+                        super::token_descr(&self_.token)
+                    );
+
+                    self_
+                        .dcx()
+                        .struct_span_err(self_.token.span, msg)
+                        .with_span_label(self_.token.span, format!("expected {expected}"))
+                });
+            PatKind::Expr(self.mk_expr(lo, ExprKind::Lit(lit)))
+        } else {
+            // Try to parse everything else as literal with optional minus
+            match self.parse_literal_maybe_minus() {
+                Ok(begin) => {
+                    let begin = self
+                        .maybe_recover_trailing_expr(begin.span, false)
+                        .map(|(guar, sp)| self.mk_expr_err(sp, guar))
+                        .unwrap_or(begin);
+
+                    match self.parse_range_end() {
+                        Some(form) => self.parse_pat_range_begin_with(begin, form)?,
+                        None => PatKind::Expr(begin),
+                    }
+                }
+                Err(err) => return self.fatal_unexpected_non_pat(err, expected),
+            }
+        };
+
+        let pat = self.mk_pat(lo.to(self.prev_token.span), pat);
+        let pat = self.maybe_recover_from_bad_qpath(pat)?;
+        let pat = self.recover_intersection_pat(pat)?;
+
+        if !allow_range_pat {
+            self.ban_pat_range_if_ambiguous(&pat)
+        }
+
+        Ok(pat)
+    }
+
+    /// Recover from a typoed `...` pattern that was encountered in place of a rest pattern `..`.
+    /// Ref: Issue #70388.
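+    /// For example, `[first, ...]` is recovered as if it were the slice pattern `[first, ..]`.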
+    fn recover_dotdotdot_rest_pat(&mut self, lo: Span) -> PatKind {
+        // A typoed rest pattern `...`.
+        self.bump(); // `...`
+
+        // The user probably mistook `...` for a rest pattern `..`.
+        self.dcx().emit_err(DotDotDotRestPattern {
+            span: lo,
+            suggestion: lo.with_lo(lo.hi() - BytePos(1)),
+        });
+        PatKind::Rest
+    }
+
+    /// Try to recover the more general form `intersect ::= $pat_lhs @ $pat_rhs`.
+    ///
+    /// Binding patterns of the form `binding ::= ref? mut? $ident @ $pat_rhs` should already
+    /// have been parsed by this point, so if the next token is `@` we can try to parse the
+    /// more general form.
+    ///
+    /// Consult `parse_pat_ident` for the `binding` grammar.
+    ///
+    /// The notion of an intersection pattern is found in
+    /// e.g. [F#][and], where such patterns are called AND-patterns.
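+    ///
+    /// For example, `Some(n) @ y` is recovered by moving the binding to the left, as in
+    /// `y @ Some(n)`, while something like `A(x) @ B(y)` is rejected because the left-hand
+    /// side of `@` is not a binding.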
+    ///
+    /// [and]: https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/pattern-matching
+    fn recover_intersection_pat(&mut self, lhs: P<Pat>) -> PResult<'a, P<Pat>> {
+        if self.token != token::At {
+            // Next token is not `@` so it's not going to be an intersection pattern.
+            return Ok(lhs);
+        }
+
+        // At this point we attempt to parse `@ $pat_rhs` and emit an error.
+        self.bump(); // `@`
+        let mut rhs = self.parse_pat_no_top_alt(None, None)?;
+        let whole_span = lhs.span.to(rhs.span);
+
+        if let PatKind::Ident(_, _, sub @ None) = &mut rhs.kind {
+            // The user inverted the order, so help them fix that.
+            let lhs_span = lhs.span;
+            // Move the LHS into the RHS as a subpattern.
+            // The RHS is now the full pattern.
+            *sub = Some(lhs);
+
+            self.dcx().emit_err(PatternOnWrongSideOfAt {
+                whole_span,
+                whole_pat: pprust::pat_to_string(&rhs),
+                pattern: lhs_span,
+                binding: rhs.span,
+            });
+        } else {
+            // The special case above doesn't apply so we may have e.g. `A(x) @ B(y)`.
+            rhs.kind = PatKind::Wild;
+            self.dcx().emit_err(ExpectedBindingLeftOfAt {
+                whole_span,
+                lhs: lhs.span,
+                rhs: rhs.span,
+            });
+        }
+
+        rhs.span = whole_span;
+        Ok(rhs)
+    }
+
+    /// Ban a range pattern if it has an ambiguous interpretation.
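+    ///
+    /// For example, `&0..=5` is rejected with a suggestion to write `&(0..=5)` instead, while
+    /// ranges written with the legacy `...` syntax are left alone.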
+    fn ban_pat_range_if_ambiguous(&self, pat: &Pat) {
+        match pat.kind {
+            PatKind::Range(
+                ..,
+                Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. },
+            ) => return,
+            PatKind::Range(..) => {}
+            _ => return,
+        }
+
+        self.dcx().emit_err(AmbiguousRangePattern {
+            span: pat.span,
+            suggestion: ParenRangeSuggestion {
+                lo: pat.span.shrink_to_lo(),
+                hi: pat.span.shrink_to_hi(),
+            },
+        });
+    }
+
+    /// Parse `&pat` / `&mut pat`.
+    fn parse_pat_deref(&mut self, expected: Option<Expected>) -> PResult<'a, PatKind> {
+        self.expect_and()?;
+        if let Some((lifetime, _)) = self.token.lifetime() {
+            self.bump(); // `'a`
+
+            self.dcx().emit_err(UnexpectedLifetimeInPattern {
+                span: self.prev_token.span,
+                symbol: lifetime.name,
+                suggestion: self.prev_token.span.until(self.token.span),
+            });
+        }
+
+        let mutbl = self.parse_mutability();
+        let subpat = self.parse_pat_with_range_pat(false, expected, None)?;
+        Ok(PatKind::Ref(subpat, mutbl))
+    }
+
+    /// Parse a tuple or parenthesized pattern.
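+    ///
+    /// For example, `(0)` is a parenthesized pattern, while `(0,)` and `(..)` are tuple
+    /// patterns.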
+    fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> {
+        let open_paren = self.token.span;
+
+        let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| {
+            p.parse_pat_allow_top_guard(
+                None,
+                RecoverComma::No,
+                RecoverColon::No,
+                CommaRecoveryMode::LikelyTuple,
+            )
+        })?;
+
+        // Here, `(pat,)` is a tuple pattern.
+        // For backward compatibility, `(..)` is a tuple pattern as well.
+        let paren_pattern =
+            fields.len() == 1 && !(matches!(trailing_comma, Trailing::Yes) || fields[0].is_rest());
+
+        let pat = if paren_pattern {
+            let pat = fields.into_iter().next().unwrap();
+            let close_paren = self.prev_token.span;
+
+            match &pat.kind {
+                // recover ranges with parentheses around the `(start)..`
+                PatKind::Expr(begin)
+                    if self.may_recover()
+                        && let Some(form) = self.parse_range_end() =>
+                {
+                    self.dcx().emit_err(UnexpectedParenInRangePat {
+                        span: vec![open_paren, close_paren],
+                        sugg: UnexpectedParenInRangePatSugg {
+                            start_span: open_paren,
+                            end_span: close_paren,
+                        },
+                    });
+
+                    self.parse_pat_range_begin_with(begin.clone(), form)?
+                }
+                // recover ranges with parentheses around the `(start)..`
+                PatKind::Err(guar)
+                    if self.may_recover()
+                        && let Some(form) = self.parse_range_end() =>
+                {
+                    self.dcx().emit_err(UnexpectedParenInRangePat {
+                        span: vec![open_paren, close_paren],
+                        sugg: UnexpectedParenInRangePatSugg {
+                            start_span: open_paren,
+                            end_span: close_paren,
+                        },
+                    });
+
+                    self.parse_pat_range_begin_with(self.mk_expr_err(pat.span, *guar), form)?
+                }
+
+                // A parenthesized pattern `(pat)`.
+                _ => PatKind::Paren(pat),
+            }
+        } else {
+            PatKind::Tuple(fields)
+        };
+
+        Ok(match self.maybe_recover_trailing_expr(open_paren.to(self.prev_token.span), false) {
+            None => pat,
+            Some((guar, _)) => PatKind::Err(guar),
+        })
+    }
+
+    /// Parse a mutable binding with the `mut` token already eaten.
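+    ///
+    /// For example, `mut x` binds `x` mutably, whereas `mut (a, b)` is rejected with a
+    /// suggestion to attach `mut` to each binding, as in `(mut a, mut b)`.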
+    fn parse_pat_ident_mut(&mut self) -> PResult<'a, PatKind> {
+        let mut_span = self.prev_token.span;
+
+        self.recover_additional_muts();
+
+        let byref = self.parse_byref();
+
+        self.recover_additional_muts();
+
+        // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
+        if let Some(MetaVarKind::Pat(_)) = self.token.is_metavar_seq() {
+            self.expected_ident_found_err().emit();
+        }
+
+        // Parse the pattern we hope to be an identifier.
+        let mut pat = self.parse_pat_no_top_alt(Some(Expected::Identifier), None)?;
+
+        // If we don't have `mut $ident (@ pat)?`, error.
+        if let PatKind::Ident(BindingMode(br @ ByRef::No, m @ Mutability::Not), ..) = &mut pat.kind
+        {
+            // Don't recurse into the subpattern.
+            // `mut` on the outer binding doesn't affect the inner bindings.
+            *br = byref;
+            *m = Mutability::Mut;
+        } else {
+            // Add `mut` to any binding in the parsed pattern.
+            let changed_any_binding = Self::make_all_value_bindings_mutable(&mut pat);
+            self.ban_mut_general_pat(mut_span, &pat, changed_any_binding);
+        }
+
+        if matches!(pat.kind, PatKind::Ident(BindingMode(ByRef::Yes(_), Mutability::Mut), ..)) {
+            self.psess.gated_spans.gate(sym::mut_ref, pat.span);
+        }
+        Ok(pat.kind)
+    }
+
+    /// Turn all by-value immutable bindings in a pattern into mutable bindings.
+    /// Returns `true` if any change was made.
+    fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool {
+        struct AddMut(bool);
+        impl MutVisitor for AddMut {
+            fn visit_pat(&mut self, pat: &mut Pat) {
+                if let PatKind::Ident(BindingMode(ByRef::No, m @ Mutability::Not), ..) =
+                    &mut pat.kind
+                {
+                    self.0 = true;
+                    *m = Mutability::Mut;
+                }
+                mut_visit::walk_pat(self, pat);
+            }
+        }
+
+        let mut add_mut = AddMut(false);
+        add_mut.visit_pat(pat);
+        add_mut.0
+    }
+
+    /// Error on `mut $pat` where `$pat` is not an ident.
+    fn ban_mut_general_pat(&self, lo: Span, pat: &Pat, changed_any_binding: bool) {
+        self.dcx().emit_err(if changed_any_binding {
+            InvalidMutInPattern::NestedIdent {
+                span: lo.to(pat.span),
+                pat: pprust::pat_to_string(pat),
+            }
+        } else {
+            InvalidMutInPattern::NonIdent { span: lo.until(pat.span) }
+        });
+    }
+
+    /// Eat any extraneous `mut`s and, if we ate any, emit an error and recover.
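+    ///
+    /// For example, the repeated `mut` in `mut mut x` is reported and dropped.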
+    fn recover_additional_muts(&mut self) {
+        let lo = self.token.span;
+        while self.eat_keyword(exp!(Mut)) {}
+        if lo == self.token.span {
+            return;
+        }
+
+        let span = lo.to(self.prev_token.span);
+        let suggestion = span.with_hi(self.token.span.lo());
+        self.dcx().emit_err(RepeatedMutInPattern { span, suggestion });
+    }
+
+    /// Parse a macro invocation in pattern position.
+    fn parse_pat_mac_invoc(&mut self, path: Path) -> PResult<'a, PatKind> {
+        self.bump();
+        let args = self.parse_delim_args()?;
+        let mac = P(MacCall { path, args });
+        Ok(PatKind::MacCall(mac))
+    }
+
+    fn fatal_unexpected_non_pat(
+        &mut self,
+        err: Diag<'a>,
+        expected: Option<Expected>,
+    ) -> PResult<'a, P<Pat>> {
+        err.cancel();
+
+        let expected = Expected::to_string_or_fallback(expected);
+        let msg = format!("expected {}, found {}", expected, super::token_descr(&self.token));
+
+        let mut err = self.dcx().struct_span_err(self.token.span, msg);
+        err.span_label(self.token.span, format!("expected {expected}"));
+
+        let sp = self.psess.source_map().start_point(self.token.span);
+        if let Some(sp) = self.psess.ambiguous_block_expr_parse.borrow().get(&sp) {
+            err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
+        }
+
+        Err(err)
+    }
+
+    /// Parses the range pattern end form `".." | "..." | "..=" ;`.
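+    ///
+    /// For example, `0..5` is parsed with `RangeEnd::Excluded`, while `0..=5` and the legacy
+    /// `0...5` are parsed with `RangeEnd::Included`.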
+    fn parse_range_end(&mut self) -> Option<Spanned<RangeEnd>> {
+        let re = if self.eat(exp!(DotDotDot)) {
+            RangeEnd::Included(RangeSyntax::DotDotDot)
+        } else if self.eat(exp!(DotDotEq)) {
+            RangeEnd::Included(RangeSyntax::DotDotEq)
+        } else if self.eat(exp!(DotDot)) {
+            RangeEnd::Excluded
+        } else {
+            return None;
+        };
+        Some(respan(self.prev_token.span, re))
+    }
+
+    /// Parse a range pattern `$begin $form $end?` where `$form = ".." | "..." | "..=" ;`.
+    /// `$begin $form` has already been parsed.
+    fn parse_pat_range_begin_with(
+        &mut self,
+        begin: P<Expr>,
+        re: Spanned<RangeEnd>,
+    ) -> PResult<'a, PatKind> {
+        let end = if self.is_pat_range_end_start(0) {
+            // Parsing e.g. `X..=Y`.
+            Some(self.parse_pat_range_end()?)
+        } else {
+            // Parsing e.g. `X..`.
+            if let RangeEnd::Included(_) = re.node {
+                // FIXME(Centril): Consider semantic errors instead in `ast_validation`.
+                self.inclusive_range_with_incorrect_end();
+            }
+            None
+        };
+        Ok(PatKind::Range(Some(begin), end, re))
+    }
+
+    pub(super) fn inclusive_range_with_incorrect_end(&mut self) -> ErrorGuaranteed {
+        let tok = &self.token;
+        let span = self.prev_token.span;
+        // If the user typed "..==" instead of "..=", we want to give them
+        // a specific error message telling them to use "..=".
+        // If they typed "..=>", suggest they use ".. =>".
+        // Otherwise, we assume that they meant to type a half open exclusive
+        // range and give them an error telling them to do that instead.
+        let no_space = tok.span.lo() == span.hi();
+        match tok.kind {
+            token::Eq if no_space => {
+                let span_with_eq = span.to(tok.span);
+
+                // Ensure the user doesn't receive unhelpful unexpected token errors
+                self.bump();
+                if self.is_pat_range_end_start(0) {
+                    let _ = self.parse_pat_range_end().map_err(|e| e.cancel());
+                }
+
+                self.dcx().emit_err(InclusiveRangeExtraEquals { span: span_with_eq })
+            }
+            token::Gt if no_space => {
+                let after_pat = span.with_hi(span.hi() - BytePos(1)).shrink_to_hi();
+                self.dcx().emit_err(InclusiveRangeMatchArrow { span, arrow: tok.span, after_pat })
+            }
+            _ => self.dcx().emit_err(InclusiveRangeNoEnd {
+                span,
+                suggestion: span.with_lo(span.hi() - BytePos(1)),
+            }),
+        }
+    }
+
+    /// Parse a range-to pattern, `..X` or `..=X` where `X` remains to be parsed.
+    ///
+    /// The form `...X` is prohibited to reduce confusion with the potential
+    /// expression syntax `...expr` for splatting in expressions.
+    fn parse_pat_range_to(&mut self, mut re: Spanned<RangeEnd>) -> PResult<'a, PatKind> {
+        let end = self.parse_pat_range_end()?;
+        if let RangeEnd::Included(syn @ RangeSyntax::DotDotDot) = &mut re.node {
+            *syn = RangeSyntax::DotDotEq;
+            self.dcx().emit_err(DotDotDotRangeToPatternNotAllowed { span: re.span });
+        }
+        Ok(PatKind::Range(None, Some(end), re))
+    }
+
+    /// Is the token `dist` tokens ahead of the current one suitable as the start of a range
+    /// pattern's end?
+    fn is_pat_range_end_start(&self, dist: usize) -> bool {
+        self.check_inline_const(dist)
+            || self.look_ahead(dist, |t| {
+                t.is_path_start() // e.g. `MY_CONST`;
+                || *t == token::Dot // e.g. `.5` for recovery;
+                || matches!(t.kind, token::Literal(..) | token::Minus)
+                || t.is_bool_lit()
+                || t.is_metavar_expr()
+                || t.is_lifetime() // recover `'a` instead of `'a'`
+                || (self.may_recover() // recover leading `(`
+                    && *t == token::OpenParen
+                    && self.look_ahead(dist + 1, |t| *t != token::OpenParen)
+                    && self.is_pat_range_end_start(dist + 1))
+            })
+    }
+
+    /// Parse a range pattern end bound
+    fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
+        // recover leading `(`
+        let open_paren = (self.may_recover() && self.eat_noexpect(&token::OpenParen))
+            .then_some(self.prev_token.span);
+
+        let bound = if self.check_inline_const(0) {
+            self.parse_const_block(self.token.span, true)
+        } else if self.check_path() {
+            let lo = self.token.span;
+            let (qself, path) = if self.eat_lt() {
+                // Parse a qualified path
+                let (qself, path) = self.parse_qpath(PathStyle::Pat)?;
+                (Some(qself), path)
+            } else {
+                // Parse an unqualified path
+                (None, self.parse_path(PathStyle::Pat)?)
+            };
+            let hi = self.prev_token.span;
+            Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path)))
+        } else {
+            self.parse_literal_maybe_minus()
+        }?;
+
+        let recovered = self.maybe_recover_trailing_expr(bound.span, true);
+
+        // recover trailing `)`
+        if let Some(open_paren) = open_paren {
+            self.expect(exp!(CloseParen))?;
+
+            self.dcx().emit_err(UnexpectedParenInRangePat {
+                span: vec![open_paren, self.prev_token.span],
+                sugg: UnexpectedParenInRangePatSugg {
+                    start_span: open_paren,
+                    end_span: self.prev_token.span,
+                },
+            });
+        }
+
+        Ok(match recovered {
+            Some((guar, sp)) => self.mk_expr_err(sp, guar),
+            None => bound,
+        })
+    }
+
+    /// Is this the start of a pattern beginning with a path?
+    fn is_start_of_pat_with_path(&mut self) -> bool {
+        self.check_path()
+        // Just for recovery (see `can_be_ident`).
+        || self.token.is_ident() && !self.token.is_bool_lit() && !self.token.is_keyword(kw::In)
+    }
+
+    /// Would `parse_pat_ident` be appropriate here?
+    fn can_be_ident_pat(&mut self) -> bool {
+        self.check_ident()
+        && !self.token.is_bool_lit() // Avoid `true` or `false` as a binding as it is a literal.
+        && !self.token.is_path_segment_keyword() // Avoid e.g. `Self` as it is a path.
+        // Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`.
+        && !self.token.is_keyword(kw::In)
+        // Try to do something more complex?
+        && self.look_ahead(1, |t| !matches!(t.kind, token::OpenParen // A tuple struct pattern.
+            | token::OpenBrace // A struct pattern.
+            | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
+            | token::PathSep // A tuple / struct variant pattern.
+            | token::Bang)) // A macro expanding to a pattern.
+    }
+
+    /// Parses `ident` or `ident @ pat`.
+    /// Used by the `copy foo` and `ref foo` patterns to give a good
+    /// error message when parsing mistakes like `ref foo(a, b)`.
+    fn parse_pat_ident(
+        &mut self,
+        binding_annotation: BindingMode,
+        syntax_loc: Option<PatternLocation>,
+    ) -> PResult<'a, PatKind> {
+        let ident = self.parse_ident_common(false)?;
+
+        if self.may_recover()
+            && !matches!(syntax_loc, Some(PatternLocation::FunctionParameter))
+            && self.check_noexpect(&token::Lt)
+            && self.look_ahead(1, |t| t.can_begin_type())
+        {
+            return Err(self.dcx().create_err(GenericArgsInPatRequireTurbofishSyntax {
+                span: self.token.span,
+                suggest_turbofish: self.token.span.shrink_to_lo(),
+            }));
+        }
+
+        let sub = if self.eat(exp!(At)) {
+            Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
+        } else {
+            None
+        };
+
+        // Just to be friendly, if they write something like `ref Some(i)`,
+        // we end up here with `(` as the current token.
+        // This shortly leads to a parse error. Note that if there is no explicit
+        // binding mode then we do not end up here, because the lookahead
+        // will direct us over to `parse_enum_variant()`.
+        if self.token == token::OpenParen {
+            return Err(self
+                .dcx()
+                .create_err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }));
+        }
+
+        // Check for method calls after the `ident`,
+        // but not for `ident @ subpat`, as `subpat` was already checked and `ident` is
+        // followed by `@`.
+
+        let pat = if sub.is_none()
+            && let Some((guar, _)) = self.maybe_recover_trailing_expr(ident.span, false)
+        {
+            PatKind::Err(guar)
+        } else {
+            PatKind::Ident(binding_annotation, ident, sub)
+        };
+        Ok(pat)
+    }
+
+    /// Parse a struct ("record") pattern (e.g. `Foo { ... }` or `Foo::Bar { ... }`).
+    fn parse_pat_struct(&mut self, qself: Option<P<QSelf>>, path: Path) -> PResult<'a, PatKind> {
+        if qself.is_some() {
+            // Feature gate the use of qualified paths in patterns
+            self.psess.gated_spans.gate(sym::more_qualified_paths, path.span);
+        }
+        self.bump();
+        let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
+            e.span_label(path.span, "while parsing the fields for this pattern");
+            let guar = e.emit();
+            self.recover_stmt();
+            // When recovering, pretend we had `Foo { .. }`, to avoid cascading errors.
+            (ThinVec::new(), PatFieldsRest::Recovered(guar))
+        });
+        self.bump();
+        Ok(PatKind::Struct(qself, path, fields, etc))
+    }
+
+    /// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`).
+    fn parse_pat_tuple_struct(
+        &mut self,
+        qself: Option<P<QSelf>>,
+        path: Path,
+    ) -> PResult<'a, PatKind> {
+        let (fields, _) = self.parse_paren_comma_seq(|p| {
+            p.parse_pat_allow_top_guard(
+                None,
+                RecoverComma::No,
+                RecoverColon::No,
+                CommaRecoveryMode::EitherTupleOrPipe,
+            )
+        })?;
+        if qself.is_some() {
+            self.psess.gated_spans.gate(sym::more_qualified_paths, path.span);
+        }
+        Ok(PatKind::TupleStruct(qself, path, fields))
+    }
+
+    /// Are we sure this could not possibly be the start of a pattern?
+    ///
+    /// Currently, this only accounts for tokens that can follow identifiers
+    /// in patterns, but this can be extended as necessary.
+    fn isnt_pattern_start(&self) -> bool {
+        [
+            token::Eq,
+            token::Colon,
+            token::Comma,
+            token::Semi,
+            token::At,
+            token::OpenBrace,
+            token::CloseBrace,
+            token::CloseParen,
+        ]
+        .contains(&self.token.kind)
+    }
+
+    fn parse_pat_builtin(&mut self) -> PResult<'a, PatKind> {
+        self.parse_builtin(|self_, _lo, ident| {
+            Ok(match ident.name {
+                // builtin#deref(PAT)
+                sym::deref => Some(ast::PatKind::Deref(self_.parse_pat_allow_top_guard(
+                    None,
+                    RecoverComma::Yes,
+                    RecoverColon::Yes,
+                    CommaRecoveryMode::LikelyTuple,
+                )?)),
+                _ => None,
+            })
+        })
+    }
+
+    /// Parses `box pat`
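+    ///
+    /// If the next token cannot start a pattern (e.g. the `:` in `let box: Box<u8> = ..`),
+    /// `box` is recovered as an identifier binding instead.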
+    fn parse_pat_box(&mut self) -> PResult<'a, PatKind> {
+        let box_span = self.prev_token.span;
+
+        if self.isnt_pattern_start() {
+            let descr = super::token_descr(&self.token);
+            self.dcx().emit_err(errors::BoxNotPat {
+                span: self.token.span,
+                kw: box_span,
+                lo: box_span.shrink_to_lo(),
+                descr,
+            });
+
+            // We cannot use `parse_pat_ident()` since it will complain `box`
+            // is not an identifier.
+            let sub = if self.eat(exp!(At)) {
+                Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
+            } else {
+                None
+            };
+
+            Ok(PatKind::Ident(BindingMode::NONE, Ident::new(kw::Box, box_span), sub))
+        } else {
+            let pat = self.parse_pat_with_range_pat(false, None, None)?;
+            self.psess.gated_spans.gate(sym::box_patterns, box_span.to(self.prev_token.span));
+            Ok(PatKind::Box(pat))
+        }
+    }
+
+    /// Parses the fields of a struct-like pattern.
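+    ///
+    /// For example, in `Foo { x, y: 0, .. }` the fields are the shorthand `x`, the explicit
+    /// `y: 0`, and the rest `..`.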
+    fn parse_pat_fields(&mut self) -> PResult<'a, (ThinVec<PatField>, PatFieldsRest)> {
+        let mut fields: ThinVec<PatField> = ThinVec::new();
+        let mut etc = PatFieldsRest::None;
+        let mut ate_comma = true;
+        let mut delayed_err: Option<Diag<'a>> = None;
+        let mut first_etc_and_maybe_comma_span = None;
+        let mut last_non_comma_dotdot_span = None;
+
+        while self.token != token::CloseBrace {
+            // check that a comma comes after every field
+            if !ate_comma {
+                let err = if self.token == token::At {
+                    let prev_field = fields
+                        .last()
+                        .expect("Unreachable on first iteration, not empty otherwise")
+                        .ident;
+                    self.report_misplaced_at_in_struct_pat(prev_field)
+                } else {
+                    let mut err = self
+                        .dcx()
+                        .create_err(ExpectedCommaAfterPatternField { span: self.token.span });
+                    self.recover_misplaced_pattern_modifiers(&fields, &mut err);
+                    err
+                };
+                if let Some(delayed) = delayed_err {
+                    delayed.emit();
+                }
+                return Err(err);
+            }
+            ate_comma = false;
+
+            if self.check(exp!(DotDot))
+                || self.check_noexpect(&token::DotDotDot)
+                || self.check_keyword(exp!(Underscore))
+            {
+                etc = PatFieldsRest::Rest;
+                let mut etc_sp = self.token.span;
+                if first_etc_and_maybe_comma_span.is_none() {
+                    if let Some(comma_tok) =
+                        self.look_ahead(1, |&t| if t == token::Comma { Some(t) } else { None })
+                    {
+                        let nw_span = self
+                            .psess
+                            .source_map()
+                            .span_extend_to_line(comma_tok.span)
+                            .trim_start(comma_tok.span.shrink_to_lo())
+                            .map(|s| self.psess.source_map().span_until_non_whitespace(s));
+                        first_etc_and_maybe_comma_span = nw_span.map(|s| etc_sp.to(s));
+                    } else {
+                        first_etc_and_maybe_comma_span =
+                            Some(self.psess.source_map().span_until_non_whitespace(etc_sp));
+                    }
+                }
+
+                self.recover_bad_dot_dot();
+                self.bump(); // `..` || `...` || `_`
+
+                if self.token == token::CloseBrace {
+                    break;
+                }
+                let token_str = super::token_descr(&self.token);
+                let msg = format!("expected `}}`, found {token_str}");
+                let mut err = self.dcx().struct_span_err(self.token.span, msg);
+
+                err.span_label(self.token.span, "expected `}`");
+                let mut comma_sp = None;
+                if self.token == token::Comma {
+                    // Issue #49257
+                    let nw_span =
+                        self.psess.source_map().span_until_non_whitespace(self.token.span);
+                    etc_sp = etc_sp.to(nw_span);
+                    err.span_label(
+                        etc_sp,
+                        "`..` must be at the end and cannot have a trailing comma",
+                    );
+                    comma_sp = Some(self.token.span);
+                    self.bump();
+                    ate_comma = true;
+                }
+
+                if self.token == token::CloseBrace {
+                    // If the struct looks otherwise well formed, recover and continue.
+                    if let Some(sp) = comma_sp {
+                        err.span_suggestion_short(
+                            sp,
+                            "remove this comma",
+                            "",
+                            Applicability::MachineApplicable,
+                        );
+                    }
+                    err.emit();
+                    break;
+                } else if self.token.is_ident() && ate_comma {
+                    // Accept fields coming after `..,`.
+                    // This way we avoid "pattern missing fields" errors afterwards.
+                    // We delay this error until the end in order to have a span for a
+                    // suggested fix.
+                    if let Some(delayed_err) = delayed_err {
+                        delayed_err.emit();
+                        return Err(err);
+                    } else {
+                        delayed_err = Some(err);
+                    }
+                } else {
+                    if let Some(err) = delayed_err {
+                        err.emit();
+                    }
+                    return Err(err);
+                }
+            }
+
+            let attrs = match self.parse_outer_attributes() {
+                Ok(attrs) => attrs,
+                Err(err) => {
+                    if let Some(delayed) = delayed_err {
+                        delayed.emit();
+                    }
+                    return Err(err);
+                }
+            };
+            let lo = self.token.span;
+
+            let field = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+                let field = match this.parse_pat_field(lo, attrs) {
+                    Ok(field) => Ok(field),
+                    Err(err) => {
+                        if let Some(delayed_err) = delayed_err.take() {
+                            delayed_err.emit();
+                        }
+                        return Err(err);
+                    }
+                }?;
+                ate_comma = this.eat(exp!(Comma));
+
+                last_non_comma_dotdot_span = Some(this.prev_token.span);
+
+                // We just ate a comma, so there's no need to capture a trailing token.
+                Ok((field, Trailing::No, UsePreAttrPos::No))
+            })?;
+
+            fields.push(field)
+        }
+
+        if let Some(mut err) = delayed_err {
+            if let Some(first_etc_span) = first_etc_and_maybe_comma_span {
+                if self.prev_token == token::DotDot {
+                    // We have `.., x, ..`.
+                    err.multipart_suggestion(
+                        "remove the starting `..`",
+                        vec![(first_etc_span, String::new())],
+                        Applicability::MachineApplicable,
+                    );
+                } else if let Some(last_non_comma_dotdot_span) = last_non_comma_dotdot_span {
+                    // We have `.., x`.
+                    err.multipart_suggestion(
+                        "move the `..` to the end of the field list",
+                        vec![
+                            (first_etc_span, String::new()),
+                            (
+                                self.token.span.to(last_non_comma_dotdot_span.shrink_to_hi()),
+                                format!("{} .. }}", if ate_comma { "" } else { "," }),
+                            ),
+                        ],
+                        Applicability::MachineApplicable,
+                    );
+                }
+            }
+            err.emit();
+        }
+        Ok((fields, etc))
+    }
+
+    #[deny(rustc::untranslatable_diagnostic)]
+    fn report_misplaced_at_in_struct_pat(&self, prev_field: Ident) -> Diag<'a> {
+        debug_assert_eq!(self.token, token::At);
+        let span = prev_field.span.to(self.token.span);
+        if let Some(dot_dot_span) =
+            self.look_ahead(1, |t| if t == &token::DotDot { Some(t.span) } else { None })
+        {
+            self.dcx().create_err(AtDotDotInStructPattern {
+                span: span.to(dot_dot_span),
+                remove: span.until(dot_dot_span),
+                ident: prev_field,
+            })
+        } else {
+            self.dcx().create_err(AtInStructPattern { span })
+        }
+    }
+
+    /// If the user writes `S { ref field: name }` instead of `S { field: ref name }`, we suggest
+    /// the correct code.
+    fn recover_misplaced_pattern_modifiers(&self, fields: &ThinVec<PatField>, err: &mut Diag<'a>) {
+        if let Some(last) = fields.iter().last()
+            && last.is_shorthand
+            && let PatKind::Ident(binding, ident, None) = last.pat.kind
+            && binding != BindingMode::NONE
+            && self.token == token::Colon
+            // We found `ref mut? ident:`, try to parse a `name,` or `name }`.
+            && let Some(name_span) = self.look_ahead(1, |t| t.is_ident().then(|| t.span))
+            && self.look_ahead(2, |t| {
+                t == &token::Comma || t == &token::CloseBrace
+            })
+        {
+            let span = last.pat.span.with_hi(ident.span.lo());
+            // We have `S { ref field: name }` instead of `S { field: ref name }`
+            err.multipart_suggestion(
+                "the pattern modifiers belong after the `:`",
+                vec![
+                    (span, String::new()),
+                    (name_span.shrink_to_lo(), binding.prefix_str().to_string()),
+                ],
+                Applicability::MachineApplicable,
+            );
+        }
+    }
+
+    /// Recover on `...` or `_` as if it were `..` to avoid further errors.
+    /// See issue #46718.
+    fn recover_bad_dot_dot(&self) {
+        if self.token == token::DotDot {
+            return;
+        }
+
+        let token_str = pprust::token_to_string(&self.token);
+        self.dcx().emit_err(DotDotDotForRemainingFields { span: self.token.span, token_str });
+    }
+
+    fn parse_pat_field(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, PatField> {
+        // If a colon comes one token ahead, we're parsing an explicit `fieldname: pat` field.
+        let hi;
+        let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
+            // Parsing a pattern of the form `fieldname: pat`.
+            let fieldname = self.parse_field_name()?;
+            self.bump();
+            let pat = self.parse_pat_allow_top_guard(
+                None,
+                RecoverComma::No,
+                RecoverColon::No,
+                CommaRecoveryMode::EitherTupleOrPipe,
+            )?;
+            hi = pat.span;
+            (pat, fieldname, false)
+        } else {
+            // Parsing a pattern of the form `(box) (ref) (mut) fieldname`.
+            let is_box = self.eat_keyword(exp!(Box));
+            let boxed_span = self.token.span;
+            let mutability = self.parse_mutability();
+            let by_ref = self.parse_byref();
+
+            let fieldname = self.parse_field_name()?;
+            hi = self.prev_token.span;
+            let ann = BindingMode(by_ref, mutability);
+            let fieldpat = self.mk_pat_ident(boxed_span.to(hi), ann, fieldname);
+            let subpat =
+                if is_box { self.mk_pat(lo.to(hi), PatKind::Box(fieldpat)) } else { fieldpat };
+            (subpat, fieldname, true)
+        };
+
+        Ok(PatField {
+            ident: fieldname,
+            pat: subpat,
+            is_shorthand,
+            attrs,
+            id: ast::DUMMY_NODE_ID,
+            span: lo.to(hi),
+            is_placeholder: false,
+        })
+    }
+
+    pub(super) fn mk_pat_ident(&self, span: Span, ann: BindingMode, ident: Ident) -> P<Pat> {
+        self.mk_pat(span, PatKind::Ident(ann, ident, None))
+    }
+
+    pub(super) fn mk_pat(&self, span: Span, kind: PatKind) -> P<Pat> {
+        P(Pat { kind, span, id: ast::DUMMY_NODE_ID, tokens: None })
+    }
+}
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
new file mode 100644
index 00000000000..8e65ab99c5e
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -0,0 +1,1008 @@
+use std::mem;
+
+use ast::token::IdentIsRaw;
+use rustc_ast::ptr::P;
+use rustc_ast::token::{self, MetaVarKind, Token, TokenKind};
+use rustc_ast::{
+    self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocItemConstraint,
+    AssocItemConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
+    Path, PathSegment, QSelf,
+};
+use rustc_errors::{Applicability, Diag, PResult};
+use rustc_span::{BytePos, Ident, Span, kw, sym};
+use thin_vec::ThinVec;
+use tracing::debug;
+
+use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
+use super::{Parser, Restrictions, TokenType};
+use crate::ast::{PatKind, TyKind};
+use crate::errors::{
+    self, AttributeOnEmptyType, AttributeOnGenericArg, FnPathFoundNamedParams,
+    PathFoundAttributeInParams, PathFoundCVariadicParams, PathSingleColon, PathTripleColon,
+};
+use crate::exp;
+use crate::parser::{CommaRecoveryMode, ExprKind, RecoverColon, RecoverComma};
+
+/// Specifies how to parse a path.
+#[derive(Copy, Clone, PartialEq)]
+pub(super) enum PathStyle {
+    /// In some contexts, notably in expressions, paths with generic arguments are ambiguous
+    /// with something else. For example, in expressions `segment < ....` can be interpreted
+    /// as a comparison and `segment ( ....` can be interpreted as a function call.
+    /// In all such contexts the non-path interpretation is preferred by default for practical
+    /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
+    /// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
+    ///
+    /// Also, a path may never be followed by a `:`. This means that we can eagerly recover if
+    /// we encounter it.
+    Expr,
+    /// The same as `Expr`, but may be followed by a `:`.
+    /// For example, this code:
+    /// ```rust
+    /// struct S;
+    ///
+    /// let S: S;
+    /// //  ^ Followed by a `:`
+    /// ```
+    Pat,
+    /// In other contexts, notably in types, no ambiguity exists and paths can be written
+    /// without the disambiguator, e.g., `x<y>` - unambiguously a path.
+    /// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
+    Type,
+    /// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
+    /// visibilities or attributes.
+    /// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
+    /// (paths in "mod" contexts have to be checked later for absence of generic arguments
+    /// anyway, due to macros), but it is used to avoid weird suggestions about expected
+    /// tokens when something goes wrong.
+    Mod,
+}
+
+impl PathStyle {
+    fn has_generic_ambiguity(&self) -> bool {
+        matches!(self, Self::Expr | Self::Pat)
+    }
+}
+
+impl<'a> Parser<'a> {
+    /// Parses a qualified path.
+    /// Assumes that the leading `<` has been parsed already.
+    ///
+    /// `qualified_path = <type [as trait_ref]>::path`
+    ///
+    /// # Examples
+    /// `<T>::default`
+    /// `<T as U>::a`
+    /// `<T as U>::F::a<S>` (without disambiguator)
+    /// `<T as U>::F::a::<S>` (with disambiguator)
+    pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (P<QSelf>, Path)> {
+        let lo = self.prev_token.span;
+        let ty = self.parse_ty()?;
+
+        // `path` will contain the prefix of the path up to the `>`,
+        // if any (e.g., `U` in the `<T as U>::*` examples
+        // above). `path_span` has the span of that path, or an empty
+        // span in the case of something like `<T>::Bar`.
+        let (mut path, path_span);
+        if self.eat_keyword(exp!(As)) {
+            let path_lo = self.token.span;
+            path = self.parse_path(PathStyle::Type)?;
+            path_span = path_lo.to(self.prev_token.span);
+        } else {
+            path_span = self.token.span.to(self.token.span);
+            path = ast::Path { segments: ThinVec::new(), span: path_span, tokens: None };
+        }
+
+        // See doc comment for `unmatched_angle_bracket_count`.
+        self.expect(exp!(Gt))?;
+        if self.unmatched_angle_bracket_count > 0 {
+            self.unmatched_angle_bracket_count -= 1;
+            debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
+        }
+
+        let is_import_coupler = self.is_import_coupler();
+        if !is_import_coupler && !self.recover_colon_before_qpath_proj() {
+            self.expect(exp!(PathSep))?;
+        }
+
+        let qself = P(QSelf { ty, path_span, position: path.segments.len() });
+        if !is_import_coupler {
+            self.parse_path_segments(&mut path.segments, style, None)?;
+        }
+
+        Ok((
+            qself,
+            Path { segments: path.segments, span: lo.to(self.prev_token.span), tokens: None },
+        ))
+    }
+
+    /// Recover from an invalid single colon, when the user likely meant a qualified path.
+    /// We avoid emitting this if not followed by an identifier, as our assumption that the user
+    /// intended this to be a qualified path may not be correct.
+    ///
+    /// ```ignore (diagnostics)
+    /// <Bar as Baz<T>>:Qux
+    ///                ^ help: use double colon
+    /// ```
+    fn recover_colon_before_qpath_proj(&mut self) -> bool {
+        if !self.check_noexpect(&TokenKind::Colon)
+            || self.look_ahead(1, |t| !t.is_non_reserved_ident())
+        {
+            return false;
+        }
+
+        self.bump(); // colon
+
+        self.dcx()
+            .struct_span_err(
+                self.prev_token.span,
+                "found single colon before projection in qualified path",
+            )
+            .with_span_suggestion(
+                self.prev_token.span,
+                "use double colon",
+                "::",
+                Applicability::MachineApplicable,
+            )
+            .emit();
+
+        true
+    }
+
+    pub(super) fn parse_path(&mut self, style: PathStyle) -> PResult<'a, Path> {
+        self.parse_path_inner(style, None)
+    }
+
+    /// Parses simple paths.
+    ///
+    /// `path = [::] segment+`
+    /// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
+    ///
+    /// # Examples
+    /// `a::b::C<D>` (without disambiguator)
+    /// `a::b::C::<D>` (with disambiguator)
+    /// `Fn(Args)` (without disambiguator)
+    /// `Fn::(Args)` (with disambiguator)
+    pub(super) fn parse_path_inner(
+        &mut self,
+        style: PathStyle,
+        ty_generics: Option<&Generics>,
+    ) -> PResult<'a, Path> {
+        let reject_generics_if_mod_style = |parser: &Parser<'_>, path: Path| {
+            // Ensure generic arguments don't end up in attribute paths, such as:
+            //
+            //     macro_rules! m {
+            //         ($p:path) => { #[$p] struct S; }
+            //     }
+            //
+            //     m!(inline<u8>); //~ ERROR: unexpected generic arguments in path
+            //
+            if style == PathStyle::Mod && path.segments.iter().any(|segment| segment.args.is_some())
+            {
+                let span = path
+                    .segments
+                    .iter()
+                    .filter_map(|segment| segment.args.as_ref())
+                    .map(|arg| arg.span())
+                    .collect::<Vec<_>>();
+                parser.dcx().emit_err(errors::GenericsInPath { span });
+                // Ignore these arguments to prevent unexpected behaviors.
+                let segments = path
+                    .segments
+                    .iter()
+                    .map(|segment| PathSegment { ident: segment.ident, id: segment.id, args: None })
+                    .collect();
+                Path { segments, ..path }
+            } else {
+                path
+            }
+        };
+
+        if let Some(path) =
+            self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type))
+        {
+            return Ok(reject_generics_if_mod_style(self, path));
+        }
+
+        // If we have a `ty` metavar in the form of a path, reparse it directly as a path, instead
+        // of reparsing it as a `ty` and then extracting the path.
+        if let Some(path) = self.eat_metavar_seq(MetaVarKind::Ty { is_path: true }, |this| {
+            this.parse_path(PathStyle::Type)
+        }) {
+            return Ok(reject_generics_if_mod_style(self, path));
+        }
+
+        let lo = self.token.span;
+        let mut segments = ThinVec::new();
+        let mod_sep_ctxt = self.token.span.ctxt();
+        if self.eat_path_sep() {
+            segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
+        }
+        self.parse_path_segments(&mut segments, style, ty_generics)?;
+        Ok(Path { segments, span: lo.to(self.prev_token.span), tokens: None })
+    }
+
+    pub(super) fn parse_path_segments(
+        &mut self,
+        segments: &mut ThinVec<PathSegment>,
+        style: PathStyle,
+        ty_generics: Option<&Generics>,
+    ) -> PResult<'a, ()> {
+        loop {
+            let segment = self.parse_path_segment(style, ty_generics)?;
+            if style.has_generic_ambiguity() {
+                // In order to check for trailing angle brackets, we must have finished
+                // recursing (`parse_path_segment` can indirectly call this function),
+                // that is, the next token must be the highlighted part of the below example:
+                //
+                // `Foo::<Bar as Baz<T>>::Qux`
+                //                      ^ here
+                //
+                // As opposed to the below highlight (if we had only finished the first
+                // recursion):
+                //
+                // `Foo::<Bar as Baz<T>>::Qux`
+                //                     ^ here
+                //
+                // `PathStyle::Expr` is only provided at the root invocation and is never
+                // passed down when `parse_path_segment` recurses, so it can be checked to
+                // maintain this invariant.
+                self.check_trailing_angle_brackets(&segment, &[exp!(PathSep)]);
+            }
+            segments.push(segment);
+
+            if self.is_import_coupler() || !self.eat_path_sep() {
+                // IMPORTANT: We can *only ever* treat single colons as typo'ed double colons in
+                // expression contexts (!), since it is only there that a path can never be
+                // validly followed by a colon. In pattern contexts, a path may be followed by a
+                // type annotation, e.g. `let pat:ty`. In type contexts, a path may be followed
+                // by a list of bounds, e.g. `where ty:bound`.
+                if self.may_recover()
+                    && style == PathStyle::Expr // (!)
+                    && self.token == token::Colon
+                    && self.look_ahead(1, |token| token.is_non_reserved_ident())
+                {
+                    // Emit a special error message for `a::b:c` to help users;
+                    // otherwise, `a: c` might have been meant to introduce a new binding.
+                    if self.token.span.lo() == self.prev_token.span.hi()
+                        && self.look_ahead(1, |token| self.token.span.hi() == token.span.lo())
+                    {
+                        self.bump(); // bump past the colon
+                        self.dcx().emit_err(PathSingleColon {
+                            span: self.prev_token.span,
+                            suggestion: self.prev_token.span.shrink_to_hi(),
+                        });
+                    }
+                    continue;
+                }
+
+                return Ok(());
+            }
+        }
+    }
+
+    /// Eat `::` or, potentially, `:::`.
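+    ///
+    /// For example (illustrative), `a:::b` is reported with an error but otherwise
+    /// treated as if it had been written `a::b`.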
+    #[must_use]
+    pub(super) fn eat_path_sep(&mut self) -> bool {
+        let result = self.eat(exp!(PathSep));
+        if result && self.may_recover() {
+            if self.eat_noexpect(&token::Colon) {
+                self.dcx().emit_err(PathTripleColon { span: self.prev_token.span });
+            }
+        }
+        result
+    }
+
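+    /// Parses a single path segment, with or without generic arguments.
+    ///
+    /// A rough, illustrative summary of the accepted shapes (which ones apply depends
+    /// on `style`):
+    ///
+    /// ```ignore (illustrative)
+    /// seg              // plain segment
+    /// seg::<T, U>      // angle-bracketed arguments (turbofish in expr/pat position)
+    /// seg(A, B) -> C   // parenthesized arguments (`Fn`-style sugar)
+    /// seg(..)          // elided parenthesized arguments (return-type notation)
+    /// ```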
+    pub(super) fn parse_path_segment(
+        &mut self,
+        style: PathStyle,
+        ty_generics: Option<&Generics>,
+    ) -> PResult<'a, PathSegment> {
+        let ident = self.parse_path_segment_ident()?;
+        let is_args_start = |token: &Token| {
+            matches!(token.kind, token::Lt | token::Shl | token::OpenParen | token::LArrow)
+        };
+        let check_args_start = |this: &mut Self| {
+            this.expected_token_types.insert(TokenType::Lt);
+            this.expected_token_types.insert(TokenType::OpenParen);
+            is_args_start(&this.token)
+        };
+
+        Ok(
+            if style == PathStyle::Type && check_args_start(self)
+                || style != PathStyle::Mod && self.check_path_sep_and_look_ahead(is_args_start)
+            {
+                // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
+                // it isn't, then we reset the unmatched angle bracket count as we're about to start
+                // parsing a new path.
+                if style == PathStyle::Expr {
+                    self.unmatched_angle_bracket_count = 0;
+                }
+
+                // Generic arguments are found - `<`, `(`, `::<` or `::(`.
+                // First, eat `::` if it exists.
+                let _ = self.eat_path_sep();
+
+                let lo = self.token.span;
+                let args = if self.eat_lt() {
+                    // `<'a, T, A = U>`
+                    let args = self.parse_angle_args_with_leading_angle_bracket_recovery(
+                        style,
+                        lo,
+                        ty_generics,
+                    )?;
+                    self.expect_gt().map_err(|mut err| {
+                        // Try to recover a `:` into a `::`
+                        if self.token == token::Colon
+                            && self.look_ahead(1, |token| token.is_non_reserved_ident())
+                        {
+                            err.cancel();
+                            err = self.dcx().create_err(PathSingleColon {
+                                span: self.token.span,
+                                suggestion: self.prev_token.span.shrink_to_hi(),
+                            });
+                        }
+                        // Attempt to find places where a missing `>` might belong.
+                        else if let Some(arg) = args
+                            .iter()
+                            .rev()
+                            .find(|arg| !matches!(arg, AngleBracketedArg::Constraint(_)))
+                        {
+                            err.span_suggestion_verbose(
+                                arg.span().shrink_to_hi(),
+                                "you might have meant to end the type parameters here",
+                                ">",
+                                Applicability::MaybeIncorrect,
+                            );
+                        }
+                        err
+                    })?;
+                    let span = lo.to(self.prev_token.span);
+                    AngleBracketedArgs { args, span }.into()
+                } else if self.token == token::OpenParen
+                    // FIXME(return_type_notation): Could also recover `...` here.
+                    && self.look_ahead(1, |t| *t == token::DotDot)
+                {
+                    self.bump(); // (
+                    self.bump(); // ..
+                    self.expect(exp!(CloseParen))?;
+                    let span = lo.to(self.prev_token.span);
+
+                    self.psess.gated_spans.gate(sym::return_type_notation, span);
+
+                    let prev_lo = self.prev_token.span.shrink_to_hi();
+                    if self.eat_noexpect(&token::RArrow) {
+                        let lo = self.prev_token.span;
+                        let ty = self.parse_ty()?;
+                        let span = lo.to(ty.span);
+                        let suggestion = prev_lo.to(ty.span);
+                        self.dcx()
+                            .emit_err(errors::BadReturnTypeNotationOutput { span, suggestion });
+                    }
+
+                    P(ast::GenericArgs::ParenthesizedElided(span))
+                } else {
+                    // `(T, U) -> R`
+
+                    let prev_token_before_parsing = self.prev_token;
+                    let token_before_parsing = self.token;
+                    let mut snapshot = None;
+                    if self.may_recover()
+                        && prev_token_before_parsing == token::PathSep
+                        && (style == PathStyle::Expr && self.token.can_begin_expr()
+                            || style == PathStyle::Pat
+                                && self.token.can_begin_pattern(token::NtPatKind::PatParam {
+                                    inferred: false,
+                                }))
+                    {
+                        snapshot = Some(self.create_snapshot_for_diagnostic());
+                    }
+
+                    let dcx = self.dcx();
+                    let parse_params_result = self.parse_paren_comma_seq(|p| {
+                        let param = p.parse_param_general(|_| false, false, false);
+                        param.map(move |param| {
+                            if !matches!(param.pat.kind, PatKind::Missing) {
+                                dcx.emit_err(FnPathFoundNamedParams {
+                                    named_param_span: param.pat.span,
+                                });
+                            }
+                            if matches!(param.ty.kind, TyKind::CVarArgs) {
+                                dcx.emit_err(PathFoundCVariadicParams { span: param.pat.span });
+                            }
+                            if !param.attrs.is_empty() {
+                                dcx.emit_err(PathFoundAttributeInParams {
+                                    span: param.attrs[0].span,
+                                });
+                            }
+                            param.ty
+                        })
+                    });
+
+                    let (inputs, _) = match parse_params_result {
+                        Ok(output) => output,
+                        Err(mut error) if prev_token_before_parsing == token::PathSep => {
+                            error.span_label(
+                                prev_token_before_parsing.span.to(token_before_parsing.span),
+                                "while parsing this parenthesized list of type arguments starting here",
+                            );
+
+                            if let Some(mut snapshot) = snapshot {
+                                snapshot.recover_fn_call_leading_path_sep(
+                                    style,
+                                    prev_token_before_parsing,
+                                    &mut error,
+                                )
+                            }
+
+                            return Err(error);
+                        }
+                        Err(error) => return Err(error),
+                    };
+                    let inputs_span = lo.to(self.prev_token.span);
+                    let output =
+                        self.parse_ret_ty(AllowPlus::No, RecoverQPath::No, RecoverReturnSign::No)?;
+                    let span = ident.span.to(self.prev_token.span);
+                    ParenthesizedArgs { span, inputs, inputs_span, output }.into()
+                };
+
+                PathSegment { ident, args: Some(args), id: ast::DUMMY_NODE_ID }
+            } else {
+                // Generic arguments are not found.
+                PathSegment::from_ident(ident)
+            },
+        )
+    }
+
+    pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
+        match self.token.ident() {
+            Some((ident, IdentIsRaw::No)) if ident.is_path_segment_keyword() => {
+                self.bump();
+                Ok(ident)
+            }
+            _ => self.parse_ident(),
+        }
+    }
+
+    /// Recover `$path::(...)` as `$path(...)`.
+    ///
+    /// ```ignore (diagnostics)
+    /// foo::(420, "bar")
+    ///    ^^ remove extra separator to make the function call
+    /// // or
+    /// match x {
+    ///    Foo::(420, "bar") => { ... },
+    ///       ^^ remove extra separator to turn this into tuple struct pattern
+    ///    _ => { ... },
+    /// }
+    /// ```
+    fn recover_fn_call_leading_path_sep(
+        &mut self,
+        style: PathStyle,
+        prev_token_before_parsing: Token,
+        error: &mut Diag<'_>,
+    ) {
+        match style {
+            PathStyle::Expr
+                if let Ok(_) = self
+                    .parse_paren_comma_seq(|p| p.parse_expr())
+                    .map_err(|error| error.cancel()) => {}
+            PathStyle::Pat
+                if let Ok(_) = self
+                    .parse_paren_comma_seq(|p| {
+                        p.parse_pat_allow_top_guard(
+                            None,
+                            RecoverComma::No,
+                            RecoverColon::No,
+                            CommaRecoveryMode::LikelyTuple,
+                        )
+                    })
+                    .map_err(|error| error.cancel()) => {}
+            _ => {
+                return;
+            }
+        }
+
+        if let token::PathSep | token::RArrow = self.token.kind {
+            return;
+        }
+
+        error.span_suggestion_verbose(
+            prev_token_before_parsing.span,
+            format!(
+                "consider removing the `::` here to {}",
+                match style {
+                    PathStyle::Expr => "call the expression",
+                    PathStyle::Pat => "turn this into a tuple struct pattern",
+                    _ => {
+                        return;
+                    }
+                }
+            ),
+            "",
+            Applicability::MaybeIncorrect,
+        );
+    }
+
+    /// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
+    /// For the purposes of understanding the parsing logic of generic arguments, this function
+    /// can be thought of as being the same as just calling `self.parse_angle_args()` if the
+    /// source had the correct number of leading angle brackets.
+    ///
+    /// ```ignore (diagnostics)
+    /// bar::<<<<T as Foo>::Output>();
+    ///      ^^ help: remove extra angle brackets
+    /// ```
+    fn parse_angle_args_with_leading_angle_bracket_recovery(
+        &mut self,
+        style: PathStyle,
+        lo: Span,
+        ty_generics: Option<&Generics>,
+    ) -> PResult<'a, ThinVec<AngleBracketedArg>> {
+        // We need to detect whether there are extra leading left angle brackets and produce an
+        // appropriate error and suggestion. This cannot be implemented by looking ahead at
+        // upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
+        // then there won't be matching `>` tokens to find.
+        //
+        // To explain how this detection works, consider the following example:
+        //
+        // ```ignore (diagnostics)
+        // bar::<<<<T as Foo>::Output>();
+        //      ^^ help: remove extra angle brackets
+        // ```
+        //
+        // Parsing of the left angle brackets starts in this function. We start by parsing the
+        // `<` token (incrementing the counter of unmatched angle brackets on `Parser` via
+        // `eat_lt`):
+        //
+        // *Upcoming tokens:* `<<<<T as Foo>::Output>;`
+        // *Unmatched count:* 1
+        // *`parse_path_segment` calls deep:* 0
+        //
+        // This has the effect of recursing as this function is called if a `<` character
+        // is found within the expected generic arguments:
+        //
+        // *Upcoming tokens:* `<<<T as Foo>::Output>;`
+        // *Unmatched count:* 2
+        // *`parse_path_segment` calls deep:* 1
+        //
+        // Eventually we will have recursed until having consumed all of the `<` tokens and
+        // this will be reflected in the count:
+        //
+        // *Upcoming tokens:* `T as Foo>::Output>;`
+        // *Unmatched count:* 4
+        // *`parse_path_segment` calls deep:* 3
+        //
+        // The parser will continue until reaching the first `>` - this will decrement the
+        // unmatched angle bracket count and return to the parent invocation of this function
+        // having succeeded in parsing:
+        //
+        // *Upcoming tokens:* `::Output>;`
+        // *Unmatched count:* 3
+        // *`parse_path_segment` calls deep:* 2
+        //
+        // This will continue until the next `>` character which will also return successfully
+        // to the parent invocation of this function and decrement the count:
+        //
+        // *Upcoming tokens:* `;`
+        // *Unmatched count:* 2
+        // *`parse_path_segment` calls deep:* 1
+        //
+        // At this point, this function will expect to find another matching `>` character but
+        // won't be able to and will return an error. This will continue all the way up the
+        // call stack until the first invocation:
+        //
+        // *Upcoming tokens:* `;`
+        // *Unmatched count:* 2
+        // *`parse_path_segment` calls deep:* 0
+        //
+        // In doing this, we have managed to work out how many unmatched leading left angle
+        // brackets there are, but we cannot recover as the unmatched angle brackets have
+        // already been consumed. To remedy this, we keep a snapshot of the parser state taken
+        // before we do the above. We can then inspect whether we ended up with a parsing error
+        // and unmatched left angle brackets, and if so, restore the parser state to before any
+        // `<` characters were consumed, emit an error, consume the erroneous `<` tokens, and
+        // recover by attempting to parse the angle arguments again.
+        //
+        // In practice, the recursion of this function is indirect and there will be other
+        // locations that consume some `<` characters - as long as we update the count when
+        // this happens, it isn't an issue.
+
+        let is_first_invocation = style == PathStyle::Expr;
+        // Take a snapshot before attempting to parse - we can restore this later.
+        let snapshot = is_first_invocation.then(|| self.clone());
+
+        self.angle_bracket_nesting += 1;
+        debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
+        match self.parse_angle_args(ty_generics) {
+            Ok(args) => {
+                self.angle_bracket_nesting -= 1;
+                Ok(args)
+            }
+            Err(e) if self.angle_bracket_nesting > 10 => {
+                self.angle_bracket_nesting -= 1;
+                // When encountering severely malformed code where there are several levels of
+                // nested unclosed angle args (`f::<f::<f::<f::<...`), we avoid severe O(n^2)
+                // behavior by bailing out earlier (#117080).
+                e.emit().raise_fatal();
+            }
+            Err(e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
+                self.angle_bracket_nesting -= 1;
+
+                // Swap `self` with our backup of the parser state before attempting to parse
+                // generic arguments.
+                let snapshot = mem::replace(self, snapshot.unwrap());
+
+                // Eat the unmatched angle brackets.
+                let all_angle_brackets = (0..snapshot.unmatched_angle_bracket_count)
+                    .fold(true, |a, _| a && self.eat_lt());
+
+                if !all_angle_brackets {
+                    // If there are other tokens in between the extraneous `<`s, we cannot simply
+                    // suggest to remove them. This check also prevents us from accidentally ending
+                    // up in the middle of a multibyte character (issue #84104).
+                    let _ = mem::replace(self, snapshot);
+                    Err(e)
+                } else {
+                    // Cancel error from being unable to find `>`. We know the error
+                    // must have been this due to a non-zero unmatched angle bracket
+                    // count.
+                    e.cancel();
+
+                    debug!(
+                        "parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
+                         snapshot.count={:?}",
+                        snapshot.unmatched_angle_bracket_count,
+                    );
+
+                    // Make a span over ${unmatched angle bracket count} characters.
+                    // This is safe because `all_angle_brackets` ensures that there are only `<`s,
+                    // i.e. no multibyte characters, in this range.
+                    let span = lo
+                        .with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count.into()));
+                    self.dcx().emit_err(errors::UnmatchedAngle {
+                        span,
+                        plural: snapshot.unmatched_angle_bracket_count > 1,
+                    });
+
+                    // Try again without unmatched angle bracket characters.
+                    self.parse_angle_args(ty_generics)
+                }
+            }
+            Err(e) => {
+                self.angle_bracket_nesting -= 1;
+                Err(e)
+            }
+        }
+    }
+
+    /// Parses (possibly empty) list of generic arguments / associated item constraints,
+    /// possibly including trailing comma.
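+    ///
+    /// Illustrative example of what one call parses (the surrounding angle brackets are
+    /// handled by the caller):
+    ///
+    /// ```ignore (illustrative)
+    /// Foo<'a, T, 3, Item = U>
+    ///     ^^^^^^^^^^^^^^^^^^ parsed by this function
+    /// ```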
+    pub(super) fn parse_angle_args(
+        &mut self,
+        ty_generics: Option<&Generics>,
+    ) -> PResult<'a, ThinVec<AngleBracketedArg>> {
+        let mut args = ThinVec::new();
+        while let Some(arg) = self.parse_angle_arg(ty_generics)? {
+            args.push(arg);
+            if !self.eat(exp!(Comma)) {
+                if self.check_noexpect(&TokenKind::Semi)
+                    && self.look_ahead(1, |t| t.is_ident() || t.is_lifetime())
+                {
+                    // Add `>` to the list of expected tokens.
+                    self.check(exp!(Gt));
+                    // Handle `,` to `;` substitution
+                    let mut err = self.unexpected().unwrap_err();
+                    self.bump();
+                    err.span_suggestion_verbose(
+                        self.prev_token.span.until(self.token.span),
+                        "use a comma to separate type parameters",
+                        ", ",
+                        Applicability::MachineApplicable,
+                    );
+                    err.emit();
+                    continue;
+                }
+                if !self.token.kind.should_end_const_arg()
+                    && self.handle_ambiguous_unbraced_const_arg(&mut args)?
+                {
+                    // We've managed to (partially) recover, so continue trying to parse
+                    // arguments.
+                    continue;
+                }
+                break;
+            }
+        }
+        Ok(args)
+    }
+
+    /// Parses a single argument in the angle arguments `<...>` of a path segment.
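+    ///
+    /// Illustrative example; each underlined piece is parsed by one call (a plain
+    /// argument, an equality constraint, and a bound constraint respectively):
+    ///
+    /// ```ignore (illustrative)
+    /// Trait<T, Assoc = U, Other: Bound>
+    ///       ^  ^^^^^^^^^  ^^^^^^^^^^^^
+    /// ```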
+    fn parse_angle_arg(
+        &mut self,
+        ty_generics: Option<&Generics>,
+    ) -> PResult<'a, Option<AngleBracketedArg>> {
+        let lo = self.token.span;
+        let arg = self.parse_generic_arg(ty_generics)?;
+        match arg {
+            Some(arg) => {
+                // We use `noexpect` here because we first want to find out whether `=` or `:`
+                // is present, and only then use that info to push the other token onto the
+                // list of expected tokens.
+                let separated =
+                    self.check_noexpect(&token::Colon) || self.check_noexpect(&token::Eq);
+                if separated && (self.check(exp!(Colon)) | self.check(exp!(Eq))) {
+                    let arg_span = arg.span();
+                    let (binder, ident, gen_args) = match self.get_ident_from_generic_arg(&arg) {
+                        Ok(ident_gen_args) => ident_gen_args,
+                        Err(()) => return Ok(Some(AngleBracketedArg::Arg(arg))),
+                    };
+                    if binder {
+                        // FIXME(compiler-errors): this could be improved by suggesting lifting
+                        // this up to the trait, at least before this becomes real syntax.
+                        // e.g. `Trait<for<'a> Assoc = Ty>` -> `for<'a> Trait<Assoc = Ty>`
+                        return Err(self.dcx().struct_span_err(
+                            arg_span,
+                            "`for<...>` is not allowed on associated type bounds",
+                        ));
+                    }
+                    let kind = if self.eat(exp!(Colon)) {
+                        AssocItemConstraintKind::Bound { bounds: self.parse_generic_bounds()? }
+                    } else if self.eat(exp!(Eq)) {
+                        self.parse_assoc_equality_term(
+                            ident,
+                            gen_args.as_ref(),
+                            self.prev_token.span,
+                        )?
+                    } else {
+                        unreachable!();
+                    };
+
+                    let span = lo.to(self.prev_token.span);
+
+                    let constraint =
+                        AssocItemConstraint { id: ast::DUMMY_NODE_ID, ident, gen_args, kind, span };
+                    Ok(Some(AngleBracketedArg::Constraint(constraint)))
+                } else {
+                    // we only want to suggest `:` and `=` in contexts where the previous token
+                    // is an ident and the current token or the next token is an ident
+                    if self.prev_token.is_ident()
+                        && (self.token.is_ident() || self.look_ahead(1, |token| token.is_ident()))
+                    {
+                        self.check(exp!(Colon));
+                        self.check(exp!(Eq));
+                    }
+                    Ok(Some(AngleBracketedArg::Arg(arg)))
+                }
+            }
+            _ => Ok(None),
+        }
+    }
+
+    /// Parse the term to the right of an associated item equality constraint.
+    ///
+    /// That is, parse `$term` in `Item = $term` where `$term` is a type or
+    /// a const expression (wrapped in curly braces if complex).
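+    ///
+    /// Illustrative examples:
+    ///
+    /// ```ignore (illustrative)
+    /// Iterator<Item = u32>       // type term
+    /// Trait<CONST = { N + 1 }>   // const term (gated on `associated_const_equality`)
+    /// ```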
+    fn parse_assoc_equality_term(
+        &mut self,
+        ident: Ident,
+        gen_args: Option<&GenericArgs>,
+        eq: Span,
+    ) -> PResult<'a, AssocItemConstraintKind> {
+        let arg = self.parse_generic_arg(None)?;
+        let span = ident.span.to(self.prev_token.span);
+        let term = match arg {
+            Some(GenericArg::Type(ty)) => ty.into(),
+            Some(GenericArg::Const(c)) => {
+                self.psess.gated_spans.gate(sym::associated_const_equality, span);
+                c.into()
+            }
+            Some(GenericArg::Lifetime(lt)) => {
+                let guar = self.dcx().emit_err(errors::LifetimeInEqConstraint {
+                    span: lt.ident.span,
+                    lifetime: lt.ident,
+                    binding_label: span,
+                    colon_sugg: gen_args
+                        .map_or(ident.span, |args| args.span())
+                        .between(lt.ident.span),
+                });
+                self.mk_ty(lt.ident.span, ast::TyKind::Err(guar)).into()
+            }
+            None => {
+                let after_eq = eq.shrink_to_hi();
+                let before_next = self.token.span.shrink_to_lo();
+                let mut err = self
+                    .dcx()
+                    .struct_span_err(after_eq.to(before_next), "missing type to the right of `=`");
+                if matches!(self.token.kind, token::Comma | token::Gt) {
+                    err.span_suggestion(
+                        self.psess.source_map().next_point(eq).to(before_next),
+                        "to constrain the associated type, add a type after `=`",
+                        " TheType",
+                        Applicability::HasPlaceholders,
+                    );
+                    err.span_suggestion(
+                        eq.to(before_next),
+                        format!("remove the `=` if `{ident}` is a type"),
+                        "",
+                        Applicability::MaybeIncorrect,
+                    )
+                } else {
+                    err.span_label(
+                        self.token.span,
+                        format!("expected type, found {}", super::token_descr(&self.token)),
+                    )
+                };
+                return Err(err);
+            }
+        };
+        Ok(AssocItemConstraintKind::Equality { term })
+    }
+
+    /// We do not permit arbitrary expressions as const arguments. They must be one of:
+    /// - An expression surrounded in `{}`.
+    /// - A literal.
+    /// - A numeric literal prefixed by `-`.
+    /// - A single-segment path.
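+    ///
+    /// Illustrative examples of each accepted form:
+    ///
+    /// ```ignore (illustrative)
+    /// f::<{ n + 1 }>()   // expression surrounded in `{}`
+    /// f::<7>()           // literal
+    /// f::<-7>()          // numeric literal prefixed by `-`
+    /// f::<N>()           // single-segment path
+    /// ```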
+    pub(super) fn expr_is_valid_const_arg(&self, expr: &P<rustc_ast::Expr>) -> bool {
+        match &expr.kind {
+            ast::ExprKind::Block(_, _)
+            | ast::ExprKind::Lit(_)
+            | ast::ExprKind::IncludedBytes(..) => true,
+            ast::ExprKind::Unary(ast::UnOp::Neg, expr) => {
+                matches!(expr.kind, ast::ExprKind::Lit(_))
+            }
+            // We can only resolve single-segment paths at the moment, because multi-segment paths
+            // require type-checking: see `visit_generic_arg` in `src/librustc_resolve/late.rs`.
+            ast::ExprKind::Path(None, path)
+                if let [segment] = path.segments.as_slice()
+                    && segment.args.is_none() =>
+            {
+                true
+            }
+            _ => false,
+        }
+    }
+
+    /// Parse a const argument, e.g. `<3>`. It is assumed the angle brackets will be parsed by
+    /// the caller.
+    pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> {
+        // Parse const argument.
+        let value = if self.token.kind == token::OpenBrace {
+            self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)?
+        } else {
+            self.handle_unambiguous_unbraced_const_arg()?
+        };
+        Ok(AnonConst { id: ast::DUMMY_NODE_ID, value })
+    }
+
+    /// Parse a generic argument in a path segment.
+    /// This does not include constraints, e.g., `Item = u8`, which is handled in `parse_angle_arg`.
+    pub(super) fn parse_generic_arg(
+        &mut self,
+        ty_generics: Option<&Generics>,
+    ) -> PResult<'a, Option<GenericArg>> {
+        let mut attr_span: Option<Span> = None;
+        if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) {
+            let attrs_wrapper = self.parse_outer_attributes()?;
+            let raw_attrs = attrs_wrapper.take_for_recovery(self.psess);
+            attr_span = Some(raw_attrs[0].span.to(raw_attrs.last().unwrap().span));
+        }
+        let start = self.token.span;
+        let arg = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
+            // Parse lifetime argument.
+            GenericArg::Lifetime(self.expect_lifetime())
+        } else if self.check_const_arg() {
+            // Parse const argument.
+            GenericArg::Const(self.parse_const_arg()?)
+        } else if self.check_type() {
+            // Parse type argument.
+
+            // Proactively create a parser snapshot enabling us to rewind and try to reparse the
+            // input as a const expression in case we fail to parse a type. If we successfully
+            // do so, we will report an error that it needs to be wrapped in braces.
+            let mut snapshot = None;
+            if self.may_recover() && self.token.can_begin_expr() {
+                snapshot = Some(self.create_snapshot_for_diagnostic());
+            }
+
+            match self.parse_ty() {
+                Ok(ty) => {
+                    // Since the type parser recovers from some malformed slice and array types and
+                    // successfully returns a type, we need to look for `TyKind::Err`s in the
+                    // type to determine if error recovery has occurred and if the input is not a
+                    // syntactically valid type after all.
+                    if let ast::TyKind::Slice(inner_ty) | ast::TyKind::Array(inner_ty, _) = &ty.kind
+                        && let ast::TyKind::Err(_) = inner_ty.kind
+                        && let Some(snapshot) = snapshot
+                        && let Some(expr) =
+                            self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+                    {
+                        return Ok(Some(
+                            self.dummy_const_arg_needs_braces(
+                                self.dcx()
+                                    .struct_span_err(expr.span, "invalid const generic expression"),
+                                expr.span,
+                            ),
+                        ));
+                    }
+
+                    GenericArg::Type(ty)
+                }
+                Err(err) => {
+                    if let Some(snapshot) = snapshot
+                        && let Some(expr) =
+                            self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+                    {
+                        return Ok(Some(self.dummy_const_arg_needs_braces(err, expr.span)));
+                    }
+                    // Try to recover from possible `const` arg without braces.
+                    return self.recover_const_arg(start, err).map(Some);
+                }
+            }
+        } else if self.token.is_keyword(kw::Const) {
+            return self.recover_const_param_declaration(ty_generics);
+        } else if let Some(attr_span) = attr_span {
+            let diag = self.dcx().create_err(AttributeOnEmptyType { span: attr_span });
+            return Err(diag);
+        } else {
+            // Fall back by trying to parse a const-expr expression. If we successfully do so,
+            // then we should report an error that it needs to be wrapped in braces.
+            let snapshot = self.create_snapshot_for_diagnostic();
+            let attrs = self.parse_outer_attributes()?;
+            match self.parse_expr_res(Restrictions::CONST_EXPR, attrs) {
+                Ok((expr, _)) => {
+                    return Ok(Some(self.dummy_const_arg_needs_braces(
+                        self.dcx().struct_span_err(expr.span, "invalid const generic expression"),
+                        expr.span,
+                    )));
+                }
+                Err(err) => {
+                    self.restore_snapshot(snapshot);
+                    err.cancel();
+                    return Ok(None);
+                }
+            }
+        };
+
+        if let Some(attr_span) = attr_span {
+            let guar = self.dcx().emit_err(AttributeOnGenericArg {
+                span: attr_span,
+                fix_span: attr_span.until(arg.span()),
+            });
+            return Ok(Some(match arg {
+                GenericArg::Type(_) => GenericArg::Type(self.mk_ty(attr_span, TyKind::Err(guar))),
+                GenericArg::Const(_) => {
+                    let error_expr = self.mk_expr(attr_span, ExprKind::Err(guar));
+                    GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value: error_expr })
+                }
+                GenericArg::Lifetime(lt) => GenericArg::Lifetime(lt),
+            }));
+        }
+
+        Ok(Some(arg))
+    }
+
+    /// Given an arg inside generics, we try to destructure it as if it were the LHS in
+    /// `LHS = ...`, i.e. an associated item binding.
+    /// This returns a bool indicating if there are any `for<'a, 'b>` binder args, the
+    /// identifier, and any GAT arguments.
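+    ///
+    /// Illustrative results for a few LHS shapes:
+    ///
+    /// ```ignore (illustrative)
+    /// Assoc          // -> (false, `Assoc`, None)
+    /// Assoc<'a>      // -> (false, `Assoc`, Some(`<'a>`))
+    /// for<'a> Assoc  // -> (true, `Assoc`, None)
+    /// ```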
+    fn get_ident_from_generic_arg(
+        &self,
+        gen_arg: &GenericArg,
+    ) -> Result<(bool, Ident, Option<GenericArgs>), ()> {
+        if let GenericArg::Type(ty) = gen_arg {
+            if let ast::TyKind::Path(qself, path) = &ty.kind
+                && qself.is_none()
+                && let [seg] = path.segments.as_slice()
+            {
+                return Ok((false, seg.ident, seg.args.as_deref().cloned()));
+            } else if let ast::TyKind::TraitObject(bounds, ast::TraitObjectSyntax::None) = &ty.kind
+                && let [ast::GenericBound::Trait(trait_ref)] = bounds.as_slice()
+                && trait_ref.modifiers == ast::TraitBoundModifiers::NONE
+                && let [seg] = trait_ref.trait_ref.path.segments.as_slice()
+            {
+                return Ok((true, seg.ident, seg.args.as_deref().cloned()));
+            }
+        }
+        Err(())
+    }
+}
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
new file mode 100644
index 00000000000..2fa6520f2a4
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -0,0 +1,1068 @@
+use std::borrow::Cow;
+use std::mem;
+use std::ops::Bound;
+
+use ast::Label;
+use rustc_ast as ast;
+use rustc_ast::ptr::P;
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, TokenKind};
+use rustc_ast::util::classify::{self, TrailingBrace};
+use rustc_ast::{
+    AttrStyle, AttrVec, Block, BlockCheckMode, DUMMY_NODE_ID, Expr, ExprKind, HasAttrs, Local,
+    LocalKind, MacCall, MacCallStmt, MacStmtStyle, Recovered, Stmt, StmtKind,
+};
+use rustc_errors::{Applicability, Diag, PResult};
+use rustc_span::{BytePos, ErrorGuaranteed, Ident, Span, kw, sym};
+use thin_vec::{ThinVec, thin_vec};
+
+use super::attr::InnerAttrForbiddenReason;
+use super::diagnostics::AttemptLocalParseRecovery;
+use super::pat::{PatternLocation, RecoverComma};
+use super::path::PathStyle;
+use super::{
+    AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
+    Trailing, UsePreAttrPos,
+};
+use crate::errors::{self, MalformedLoopLabel};
+use crate::exp;
+
+impl<'a> Parser<'a> {
+    /// Parses a statement. This stops just before trailing semicolons on everything but items.
+    /// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
+    ///
+    /// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
+    /// whether or not we have attributes.
+    // Public for rustfmt usage.
+    pub fn parse_stmt(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<Stmt>> {
+        Ok(self.parse_stmt_without_recovery(false, force_collect, false).unwrap_or_else(|e| {
+            e.emit();
+            self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
+            None
+        }))
+    }
+
+    /// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
+    /// whether or not we have attributes. If `force_full_expr` is true, parses the stmt without
+    /// using `Restrictions::STMT_EXPR`. Public for `cfg_eval` macro expansion.
+    pub fn parse_stmt_without_recovery(
+        &mut self,
+        capture_semi: bool,
+        force_collect: ForceCollect,
+        force_full_expr: bool,
+    ) -> PResult<'a, Option<Stmt>> {
+        let pre_attr_pos = self.collect_pos();
+        let attrs = self.parse_outer_attributes()?;
+        let lo = self.token.span;
+
+        if let Some(stmt) = self.eat_metavar_seq(MetaVarKind::Stmt, |this| {
+            this.parse_stmt_without_recovery(false, ForceCollect::Yes, false)
+        }) {
+            let mut stmt = stmt.expect("an actual statement");
+            stmt.visit_attrs(|stmt_attrs| {
+                attrs.prepend_to_nt_inner(stmt_attrs);
+            });
+            return Ok(Some(stmt));
+        }
+
+        if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
+            self.bump();
+            let mut_let_span = lo.to(self.token.span);
+            self.dcx().emit_err(errors::InvalidVariableDeclaration {
+                span: mut_let_span,
+                sub: errors::InvalidVariableDeclarationSub::SwitchMutLetOrder(mut_let_span),
+            });
+        }
+
+        let stmt = if self.token.is_keyword(kw::Super) && self.is_keyword_ahead(1, &[kw::Let]) {
+            self.collect_tokens(None, attrs, force_collect, |this, attrs| {
+                let super_span = this.token.span;
+                this.expect_keyword(exp!(Super))?;
+                this.expect_keyword(exp!(Let))?;
+                this.psess.gated_spans.gate(sym::super_let, super_span);
+                let local = this.parse_local(Some(super_span), attrs)?;
+                let trailing = Trailing::from(capture_semi && this.token == token::Semi);
+                Ok((
+                    this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
+                    trailing,
+                    UsePreAttrPos::No,
+                ))
+            })?
+        } else if self.token.is_keyword(kw::Let) {
+            self.collect_tokens(None, attrs, force_collect, |this, attrs| {
+                this.expect_keyword(exp!(Let))?;
+                let local = this.parse_local(None, attrs)?;
+                let trailing = Trailing::from(capture_semi && this.token == token::Semi);
+                Ok((
+                    this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
+                    trailing,
+                    UsePreAttrPos::No,
+                ))
+            })?
+        } else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
+            self.recover_stmt_local_after_let(
+                lo,
+                attrs,
+                errors::InvalidVariableDeclarationSub::MissingLet,
+                force_collect,
+            )?
+        } else if self.is_kw_followed_by_ident(kw::Auto) && self.may_recover() {
+            self.bump(); // `auto`
+            self.recover_stmt_local_after_let(
+                lo,
+                attrs,
+                errors::InvalidVariableDeclarationSub::UseLetNotAuto,
+                force_collect,
+            )?
+        } else if self.is_kw_followed_by_ident(sym::var) && self.may_recover() {
+            self.bump(); // `var`
+            self.recover_stmt_local_after_let(
+                lo,
+                attrs,
+                errors::InvalidVariableDeclarationSub::UseLetNotVar,
+                force_collect,
+            )?
+        } else if self.check_path()
+            && !self.token.is_qpath_start()
+            && !self.is_path_start_item()
+            && !self.is_builtin()
+        {
+            // We have avoided contextual keywords like `union`, items with `crate` visibility,
+            // or `auto trait` items. We aim to parse an arbitrary path `a::b`, but not something
+            // that merely starts like a path (1 token) and is in fact not a path.
+            // Also, we avoid stealing syntax from `parse_item_`.
+            //
+            // `UsePreAttrPos::Yes` here means the attribute belongs unconditionally to the
+            // expression, not the statement. (But the statement attributes/tokens are obtained
+            // from the expression anyway, because `Stmt` delegates `HasAttrs`/`HasTokens` to
+            // the things within `StmtKind`.)
+            let stmt = self.collect_tokens(
+                Some(pre_attr_pos),
+                AttrWrapper::empty(),
+                force_collect,
+                |this, _empty_attrs| {
+                    Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No, UsePreAttrPos::Yes))
+                },
+            );
+            match stmt {
+                Ok(stmt) => stmt,
+                Err(mut err) => {
+                    self.suggest_add_missing_let_for_stmt(&mut err);
+                    return Err(err);
+                }
+            }
+        } else if let Some(item) = self.parse_item_common(
+            attrs.clone(), // FIXME: unwanted clone of attrs
+            false,
+            true,
+            FnParseMode { req_name: |_| true, req_body: true },
+            force_collect,
+        )? {
+            self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
+        } else if self.eat(exp!(Semi)) {
+            // Do not attempt to parse an expression if we're done here.
+            self.error_outer_attrs(attrs);
+            self.mk_stmt(lo, StmtKind::Empty)
+        } else if self.token != token::CloseBrace {
+            // Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
+            // above.
+            let restrictions =
+                if force_full_expr { Restrictions::empty() } else { Restrictions::STMT_EXPR };
+            let e = self.collect_tokens(
+                Some(pre_attr_pos),
+                AttrWrapper::empty(),
+                force_collect,
+                |this, _empty_attrs| {
+                    let (expr, _) = this.parse_expr_res(restrictions, attrs)?;
+                    Ok((expr, Trailing::No, UsePreAttrPos::Yes))
+                },
+            )?;
+            if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(exp!(Else)) {
+                let bl = self.parse_block()?;
+                // Destructuring assignment ... else.
+                // This is not allowed, but point it out in a nice way.
+                self.dcx().emit_err(errors::AssignmentElseNotAllowed { span: e.span.to(bl.span) });
+            }
+            self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
+        } else {
+            self.error_outer_attrs(attrs);
+            return Ok(None);
+        };
+
+        self.maybe_augment_stashed_expr_in_pats_with_suggestions(&stmt);
+        Ok(Some(stmt))
+    }
+
+    fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
+        let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+            let path = this.parse_path(PathStyle::Expr)?;
+
+            if this.eat(exp!(Bang)) {
+                let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
+                return Ok((
+                    stmt_mac,
+                    Trailing::from(this.token == token::Semi),
+                    UsePreAttrPos::No,
+                ));
+            }
+
+            let expr = if this.eat(exp!(OpenBrace)) {
+                this.parse_expr_struct(None, path, true)?
+            } else {
+                let hi = this.prev_token.span;
+                this.mk_expr(lo.to(hi), ExprKind::Path(None, path))
+            };
+
+            let expr = this.with_res(Restrictions::STMT_EXPR, |this| {
+                this.parse_expr_dot_or_call_with(attrs, expr, lo)
+            })?;
+            // `DUMMY_SP` will get overwritten later in this function
+            Ok((
+                this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)),
+                Trailing::No,
+                UsePreAttrPos::No,
+            ))
+        })?;
+
+        if let StmtKind::Expr(expr) = stmt.kind {
+            // Perform this outside of the `collect_tokens` closure, since our
+            // outer attributes do not apply to this part of the expression.
+            let (expr, _) = self.with_res(Restrictions::STMT_EXPR, |this| {
+                this.parse_expr_assoc_rest_with(Bound::Unbounded, true, expr)
+            })?;
+            Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
+        } else {
+            Ok(stmt)
+        }
+    }
+
+    /// Parses a statement macro `mac!(args)` provided a `path` representing `mac`.
+    /// At this point, the `!` token after the path has already been eaten.
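+    ///
+    /// Whether this becomes a macro-call statement or an expression statement depends on
+    /// the delimiter and the following token; illustrative cases:
+    ///
+    /// ```ignore (illustrative)
+    /// m! { .. }          // macro-call statement (braces, not followed by `.`/`?`)
+    /// m!(..);            // macro-call statement (terminated by `;`)
+    /// m!(..).count()     // continued as an expression statement
+    /// ```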
+    fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResult<'a, Stmt> {
+        let args = self.parse_delim_args()?;
+        let hi = self.prev_token.span;
+
+        let style = match args.delim {
+            Delimiter::Brace => MacStmtStyle::Braces,
+            _ => MacStmtStyle::NoBraces,
+        };
+
+        let mac = P(MacCall { path, args });
+
+        let kind = if (style == MacStmtStyle::Braces
+            && !matches!(self.token.kind, token::Dot | token::Question))
+            || matches!(
+                self.token.kind,
+                token::Semi
+                    | token::Eof
+                    | token::CloseInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Stmt))
+            ) {
+            StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
+        } else {
+            // Since none of the above applied, this is an expression statement macro.
+            let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
+            let e = self.maybe_recover_from_bad_qpath(e)?;
+            let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
+            let (e, _) = self.parse_expr_assoc_rest_with(Bound::Unbounded, false, e)?;
+            StmtKind::Expr(e)
+        };
+        Ok(self.mk_stmt(lo.to(hi), kind))
+    }
+
+    /// Error on outer attributes in this context.
+    /// Also error if the previous token was a doc comment.
+    fn error_outer_attrs(&self, attrs: AttrWrapper) {
+        if !attrs.is_empty()
+            && let attrs @ [.., last] = &*attrs.take_for_recovery(self.psess)
+        {
+            if last.is_doc_comment() {
+                self.dcx().emit_err(errors::DocCommentDoesNotDocumentAnything {
+                    span: last.span,
+                    missing_comma: None,
+                });
+            } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
+                self.dcx().emit_err(errors::ExpectedStatementAfterOuterAttr { span: last.span });
+            }
+        }
+    }
+
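+    /// Recovers a `let`-like statement written with a wrong or missing keyword, e.g.
+    /// `mut x = 5;`, `auto x = 5;`, or `var x = 5;` (illustrative inputs), parsing it as
+    /// a `let` statement and emitting the given subdiagnostic.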
+    fn recover_stmt_local_after_let(
+        &mut self,
+        lo: Span,
+        attrs: AttrWrapper,
+        subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
+        force_collect: ForceCollect,
+    ) -> PResult<'a, Stmt> {
+        let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| {
+            let local = this.parse_local(None, attrs)?;
+            // FIXME - maybe capture semicolon in recovery?
+            Ok((
+                this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
+                Trailing::No,
+                UsePreAttrPos::No,
+            ))
+        })?;
+        self.dcx()
+            .emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
+        Ok(stmt)
+    }
+
+    /// Parses a local variable declaration.
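+    ///
+    /// The `let` (or `super let`) keyword has already been eaten by the caller, so an
+    /// illustrative remaining input would be:
+    ///
+    /// ```ignore (illustrative)
+    /// Some(x): Option<u32> = get() else { return };
+    /// ```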
+    fn parse_local(&mut self, super_: Option<Span>, attrs: AttrVec) -> PResult<'a, P<Local>> {
+        let lo = super_.unwrap_or(self.prev_token.span);
+
+        if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) {
+            self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
+            self.bump();
+        }
+
+        let (pat, colon) =
+            self.parse_pat_before_ty(None, RecoverComma::Yes, PatternLocation::LetBinding)?;
+
+        let (err, ty, colon_sp) = if colon {
+            // Save the state of the parser before parsing type normally, in case there is a `:`
+            // instead of an `=` typo.
+            let parser_snapshot_before_type = self.clone();
+            let colon_sp = self.prev_token.span;
+            match self.parse_ty() {
+                Ok(ty) => (None, Some(ty), Some(colon_sp)),
+                Err(mut err) => {
+                    err.span_label(
+                        colon_sp,
+                        format!(
+                            "while parsing the type for {}",
+                            pat.descr()
+                                .map_or_else(|| "the binding".to_string(), |n| format!("`{n}`"))
+                        ),
+                    );
+                    // We use `noexpect` here because we don't actually expect `Eq` to be here,
+                    // but we still check for it so that we can handle it if it is there.
+                    let err = if self.check_noexpect(&token::Eq) {
+                        err.emit();
+                        None
+                    } else {
+                        // Rewind to before attempting to parse the type and continue parsing.
+                        let parser_snapshot_after_type =
+                            mem::replace(self, parser_snapshot_before_type);
+                        Some((parser_snapshot_after_type, colon_sp, err))
+                    };
+                    (err, None, Some(colon_sp))
+                }
+            }
+        } else {
+            (None, None, None)
+        };
+        let init = match (self.parse_initializer(err.is_some()), err) {
+            (Ok(init), None) => {
+                // init parsed, ty parsed
+                init
+            }
+            (Ok(init), Some((_, colon_sp, mut err))) => {
+                // init parsed, ty error
+                // What follows the `:` could be parsed as the initializer, so it is likely there
+                // was a typo in the code: `:` instead of `=`. Add the suggestion and emit the error.
+                err.span_suggestion_short(
+                    colon_sp,
+                    "use `=` if you meant to assign",
+                    " =",
+                    Applicability::MachineApplicable,
+                );
+                err.emit();
+                // As this was parsed successfully, continue as if the code has been fixed for the
+                // rest of the file. It will still fail due to the emitted error, but we avoid
+                // extra noise.
+                init
+            }
+            (Err(init_err), Some((snapshot, _, ty_err))) => {
+                // init error, ty error
+                init_err.cancel();
+                // Couldn't parse the type nor the initializer, only raise the type error and
+                // return to the parser state before parsing the type as the initializer.
+                // let x: <parse_error>;
+                *self = snapshot;
+                return Err(ty_err);
+            }
+            (Err(err), None) => {
+                // init error, ty parsed
+                // Couldn't parse the initializer and we're not attempting to recover a failed
+                // parse of the type, return the error.
+                return Err(err);
+            }
+        };
+        let kind = match init {
+            None => LocalKind::Decl,
+            Some(init) => {
+                if self.eat_keyword(exp!(Else)) {
+                    if self.token.is_keyword(kw::If) {
+                        // `let...else if`. Emit the same error that `parse_block()` would,
+                        // but explicitly point out that this pattern is not allowed.
+                        let msg = "conditional `else if` is not supported for `let...else`";
+                        return Err(self.error_block_no_opening_brace_msg(Cow::from(msg)));
+                    }
+                    let els = self.parse_block()?;
+                    self.check_let_else_init_bool_expr(&init);
+                    self.check_let_else_init_trailing_brace(&init);
+                    LocalKind::InitElse(init, els)
+                } else {
+                    LocalKind::Init(init)
+                }
+            }
+        };
+        let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span };
+        Ok(P(ast::Local {
+            super_,
+            ty,
+            pat,
+            kind,
+            id: DUMMY_NODE_ID,
+            span: lo.to(hi),
+            colon_sp,
+            attrs,
+            tokens: None,
+        }))
+    }
+
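+    /// In `let ... else`, a lazy boolean initializer such as `a && b` must be wrapped in
+    /// parentheses; e.g. (illustrative) `let _ = a && b else { return };` is rejected
+    /// with a suggestion to write `(a && b)`.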
+    fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
+        if let ast::ExprKind::Binary(op, ..) = init.kind {
+            if op.node.is_lazy() {
+                self.dcx().emit_err(errors::InvalidExpressionInLetElse {
+                    span: init.span,
+                    operator: op.node.as_str(),
+                    sugg: errors::WrapInParentheses::Expression {
+                        left: init.span.shrink_to_lo(),
+                        right: init.span.shrink_to_hi(),
+                    },
+                });
+            }
+        }
+    }
+
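+    /// In `let ... else`, the initializer may not end with a curly brace (e.g. a braced
+    /// macro call or a block-like expression); an illustrative rejected input is
+    /// `let x = m! {} else { return };`, for which we suggest parentheses.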
+    fn check_let_else_init_trailing_brace(&self, init: &ast::Expr) {
+        if let Some(trailing) = classify::expr_trailing_brace(init) {
+            let (span, sugg) = match trailing {
+                TrailingBrace::MacCall(mac) => (
+                    mac.span(),
+                    errors::WrapInParentheses::MacroArgs {
+                        left: mac.args.dspan.open,
+                        right: mac.args.dspan.close,
+                    },
+                ),
+                TrailingBrace::Expr(expr) => (
+                    expr.span,
+                    errors::WrapInParentheses::Expression {
+                        left: expr.span.shrink_to_lo(),
+                        right: expr.span.shrink_to_hi(),
+                    },
+                ),
+            };
+            self.dcx().emit_err(errors::InvalidCurlyInLetElse {
+                span: span.with_lo(span.hi() - BytePos(1)),
+                sugg,
+            });
+        }
+    }
+
+    /// Parses the RHS of a local variable declaration (e.g., `= 14;`).
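+    ///
+    /// ```ignore (diagnostics)
+    /// let x += 1;
+    ///        ^^ the extra operator is diagnosed and the statement is recovered as `let x = 1;`
+    /// ```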
+    fn parse_initializer(&mut self, eq_optional: bool) -> PResult<'a, Option<P<Expr>>> {
+        let eq_consumed = match self.token.kind {
+            token::PlusEq
+            | token::MinusEq
+            | token::StarEq
+            | token::SlashEq
+            | token::PercentEq
+            | token::CaretEq
+            | token::AndEq
+            | token::OrEq
+            | token::ShlEq
+            | token::ShrEq => {
+                // Recover `let x <op>= 1` as `let x = 1`. We must not use `+ BytePos(1)` here,
+                // because `<op>` can be a recovered multi-byte lookalike, e.g. `➖=` (where `➖`
+                // is the U+2796 Heavy Minus Sign character), which was recovered as `-=`.
+                let extra_op_span = self.psess.source_map().start_point(self.token.span);
+                self.dcx().emit_err(errors::CompoundAssignmentExpressionInLet {
+                    span: self.token.span,
+                    suggestion: extra_op_span,
+                });
+                self.bump();
+                true
+            }
+            _ => self.eat(exp!(Eq)),
+        };
+
+        Ok(if eq_consumed || eq_optional { Some(self.parse_expr()?) } else { None })
+    }
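+
+    // Recovery sketch for the compound-assignment arm above: given `let x += 1;`, the
+    // parser reports the stray operator, points the suggestion at the extra `+`, and then
+    // parses `1` as the initializer, i.e. it continues as if `let x = 1;` had been written.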
+
+    /// Parses a block. No inner attributes are allowed.
+    pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
+        let (attrs, block) = self.parse_inner_attrs_and_block(None)?;
+        if let [.., last] = &*attrs {
+            let suggest_to_outer = match &last.kind {
+                ast::AttrKind::Normal(attr) => attr.item.is_valid_for_outer_style(),
+                _ => false,
+            };
+            self.error_on_forbidden_inner_attr(
+                last.span,
+                super::attr::InnerAttrPolicy::Forbidden(Some(
+                    InnerAttrForbiddenReason::InCodeBlock,
+                )),
+                suggest_to_outer,
+            );
+        }
+        Ok(block)
+    }
+
+    fn error_block_no_opening_brace_msg(&mut self, msg: Cow<'static, str>) -> Diag<'a> {
+        let prev = self.prev_token.span;
+        let sp = self.token.span;
+        let mut err = self.dcx().struct_span_err(sp, msg);
+        self.label_expected_raw_ref(&mut err);
+
+        let do_not_suggest_help = self.token.is_keyword(kw::In)
+            || self.token == token::Colon
+            || self.prev_token.is_keyword(kw::Raw);
+
+        // Check to see if the user has written something like
+        //
+        //    if (cond)
+        //      bar;
+        //
+        // which is valid in other languages, but not Rust.
+        match self.parse_stmt_without_recovery(false, ForceCollect::No, false) {
+            // If the next token is an open brace, e.g., we have:
+            //
+            //     if expr other_expr {
+            //        ^    ^          ^- lookahead(1) is a brace
+            //        |    |- current token is not "else"
+            //        |- (statement we just parsed)
+            //
+            // the place-inside-a-block suggestion would be more likely wrong than right.
+            //
+            // FIXME(compiler-errors): this should probably parse an arbitrary expr and not
+            // just lookahead one token, so we can see if there's a brace after _that_,
+            // since we want to protect against:
+            //     `if 1 1 + 1 {` being suggested as  `if { 1 } 1 + 1 {`
+            //                                            +   +
+            Ok(Some(_))
+                if (!self.token.is_keyword(kw::Else)
+                    && self.look_ahead(1, |t| t == &token::OpenBrace))
+                    || do_not_suggest_help => {}
+            // Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836).
+            Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
+            Ok(Some(stmt)) => {
+                let stmt_own_line = self.psess.source_map().is_line_before_span_empty(sp);
+                let stmt_span = if stmt_own_line && self.eat(exp!(Semi)) {
+                    // Expand the span to include the semicolon.
+                    stmt.span.with_hi(self.prev_token.span.hi())
+                } else {
+                    stmt.span
+                };
+                self.suggest_fixes_misparsed_for_loop_head(
+                    &mut err,
+                    prev.between(sp),
+                    stmt_span,
+                    &stmt.kind,
+                );
+            }
+            Err(e) => {
+                e.delay_as_bug();
+            }
+            _ => {}
+        }
+        err.span_label(sp, "expected `{`");
+        err
+    }
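+
+    // Sketch of the main case this targets (placeholder names): code written in the style
+    // of other languages,
+    //
+    //     if (cond)
+    //         bar();
+    //
+    // ends up here; the error labels the position where `{` was expected, and
+    // `suggest_fixes_misparsed_for_loop_head` may additionally propose wrapping the parsed
+    // statement in a block (`{ bar(); }`).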
+
+    fn suggest_fixes_misparsed_for_loop_head(
+        &self,
+        e: &mut Diag<'_>,
+        between: Span,
+        stmt_span: Span,
+        stmt_kind: &StmtKind,
+    ) {
+        match (&self.token.kind, &stmt_kind) {
+            (token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Call(..) = expr.kind => {
+                // for _ in x y() {}
+                e.span_suggestion_verbose(
+                    between,
+                    "you might have meant to write a method call",
+                    ".".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            (token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Field(..) = expr.kind => {
+                // for _ in x y.z {}
+                e.span_suggestion_verbose(
+                    between,
+                    "you might have meant to write a field access",
+                    ".".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            (token::CloseBrace, StmtKind::Expr(expr))
+                if let ExprKind::Struct(expr) = &expr.kind
+                    && let None = expr.qself
+                    && expr.path.segments.len() == 1 =>
+            {
+                // This is specific to "mistyped `if` condition followed by empty body"
+                //
+                // for _ in x y {}
+                e.span_suggestion_verbose(
+                    between,
+                    "you might have meant to write a field access",
+                    ".".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            (token::OpenBrace, StmtKind::Expr(expr))
+                if let ExprKind::Lit(lit) = expr.kind
+                    && let None = lit.suffix
+                    && let token::LitKind::Integer | token::LitKind::Float = lit.kind =>
+            {
+                // for _ in x 0 {}
+                // for _ in x 0.0 {}
+                e.span_suggestion_verbose(
+                    between,
+                    "you might have meant to write a field access",
+                    ".".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            (token::OpenBrace, StmtKind::Expr(expr))
+                if let ExprKind::Loop(..)
+                | ExprKind::If(..)
+                | ExprKind::While(..)
+                | ExprKind::Match(..)
+                | ExprKind::ForLoop { .. }
+                | ExprKind::TryBlock(..)
+                | ExprKind::Ret(..)
+                | ExprKind::Closure(..)
+                | ExprKind::Struct(..)
+                | ExprKind::Try(..) = expr.kind =>
+            {
+                // These are more likely to have been meant as a block body.
+                e.multipart_suggestion(
+                    "you might have meant to write this as part of a block",
+                    vec![
+                        (stmt_span.shrink_to_lo(), "{ ".to_string()),
+                        (stmt_span.shrink_to_hi(), " }".to_string()),
+                    ],
+                    // Speculative; has been misleading in the past (#46836).
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            (token::OpenBrace, _) => {}
+            (_, _) => {
+                e.multipart_suggestion(
+                    "you might have meant to write this as part of a block",
+                    vec![
+                        (stmt_span.shrink_to_lo(), "{ ".to_string()),
+                        (stmt_span.shrink_to_hi(), " }".to_string()),
+                    ],
+                    // Speculative; has been misleading in the past (#46836).
+                    Applicability::MaybeIncorrect,
+                );
+            }
+        }
+    }
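+
+    // A few concrete shapes handled above (illustrative placeholders):
+    //
+    //     for _ in x y() {}   // suggests `x.y()` (method call)
+    //     for _ in x y.z {}   // suggests `x.y.z` (field access)
+    //     for _ in x 0 {}     // suggests `x.0`   (field access on a literal)
+    //
+    // Heads that look more like a block body (`if`, `match`, `loop`, a struct literal, ...)
+    // instead get the "write this as part of a block" suggestion.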
+
+    fn error_block_no_opening_brace<T>(&mut self) -> PResult<'a, T> {
+        let tok = super::token_descr(&self.token);
+        let msg = format!("expected `{{`, found {tok}");
+        Err(self.error_block_no_opening_brace_msg(Cow::from(msg)))
+    }
+
+    /// Parses a block. Inner attributes are allowed, block labels are not.
+    ///
+    /// If `loop_header` is `Some` and an unexpected block label is encountered, we suggest
+    /// moving the label just before `loop_header`; otherwise we suggest removing it.
+    pub(super) fn parse_inner_attrs_and_block(
+        &mut self,
+        loop_header: Option<Span>,
+    ) -> PResult<'a, (AttrVec, P<Block>)> {
+        self.parse_block_common(self.token.span, BlockCheckMode::Default, loop_header)
+    }
+
+    /// Parses a block. Inner attributes are allowed, block labels are not.
+    ///
+    /// If `loop_header` is `Some` and an unexpected block label is encountered, we suggest
+    /// moving the label just before `loop_header`; otherwise we suggest removing it.
+    pub(super) fn parse_block_common(
+        &mut self,
+        lo: Span,
+        blk_mode: BlockCheckMode,
+        loop_header: Option<Span>,
+    ) -> PResult<'a, (AttrVec, P<Block>)> {
+        if let Some(block) = self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) {
+            return Ok((AttrVec::new(), block));
+        }
+
+        let maybe_ident = self.prev_token;
+        self.maybe_recover_unexpected_block_label(loop_header);
+        if !self.eat(exp!(OpenBrace)) {
+            return self.error_block_no_opening_brace();
+        }
+
+        let attrs = self.parse_inner_attributes()?;
+        let tail = match self.maybe_suggest_struct_literal(lo, blk_mode, maybe_ident) {
+            Some(tail) => tail?,
+            None => self.parse_block_tail(lo, blk_mode, AttemptLocalParseRecovery::Yes)?,
+        };
+        Ok((attrs, tail))
+    }
+
+    /// Parses the rest of a block expression or function body.
+    /// Precondition: already parsed the '{'.
+    pub fn parse_block_tail(
+        &mut self,
+        lo: Span,
+        s: BlockCheckMode,
+        recover: AttemptLocalParseRecovery,
+    ) -> PResult<'a, P<Block>> {
+        let mut stmts = ThinVec::new();
+        let mut snapshot = None;
+        while !self.eat(exp!(CloseBrace)) {
+            if self.token == token::Eof {
+                break;
+            }
+            if self.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) {
+                // Account for `<<<<<<<` diff markers. We can't proactively error here because
+                // that can be a valid path start, so we snapshot and reparse only if we've
+                // encountered another parse error.
+                snapshot = Some(self.create_snapshot_for_diagnostic());
+            }
+            let stmt = match self.parse_full_stmt(recover) {
+                Err(mut err) if recover.yes() => {
+                    if let Some(ref mut snapshot) = snapshot {
+                        snapshot.recover_vcs_conflict_marker();
+                    }
+                    if self.token == token::Colon {
+                        // If the tokens before and after the current one are both
+                        // integer literals (e.g. `1:42`), this is likely a Python-style
+                        // range expression, so we suggest `..`.
+                        if self.prev_token.is_integer_lit()
+                            && self.may_recover()
+                            && self.look_ahead(1, |token| token.is_integer_lit())
+                        {
+                            // FIXME(hkmatsumoto): Might be better to trigger
+                            // this only when parsing an index expression.
+                            err.span_suggestion_verbose(
+                                self.token.span,
+                                "you might have meant a range expression",
+                                "..",
+                                Applicability::MaybeIncorrect,
+                            );
+                        } else {
+                            // If the next token immediately follows the colon, it's likely
+                            // a path, so we suggest a path separator (`::`).
+                            self.bump();
+                            if self.token.span.lo() == self.prev_token.span.hi() {
+                                err.span_suggestion_verbose(
+                                    self.prev_token.span,
+                                    "maybe write a path separator here",
+                                    "::",
+                                    Applicability::MaybeIncorrect,
+                                );
+                            }
+                        }
+                    }
+
+                    let guar = err.emit();
+                    self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
+                    Some(self.mk_stmt_err(self.token.span, guar))
+                }
+                Ok(stmt) => stmt,
+                Err(err) => return Err(err),
+            };
+            if let Some(stmt) = stmt {
+                stmts.push(stmt);
+            } else {
+                // Found only `;` or `}`.
+                continue;
+            };
+        }
+        Ok(self.mk_block(stmts, s, lo.to(self.prev_token.span)))
+    }
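+
+    // The colon recovery above is aimed at inputs like the following (illustrative
+    // placeholders, not exact test cases):
+    //
+    //     v[1:42]    // `:` between two integer literals: suggest the range `1..42`
+    //     foo:bar()  // `:` glued to the next token: suggest the path `foo::bar()`
+    //
+    // Both suggestions are emitted as `MaybeIncorrect`, since the `:` may be an unrelated typo.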
+
+    fn recover_missing_dot(&mut self, err: &mut Diag<'_>) {
+        let Some((ident, _)) = self.token.ident() else {
+            return;
+        };
+        if let Some(c) = ident.name.as_str().chars().next()
+            && c.is_uppercase()
+        {
+            return;
+        }
+        if self.token.is_reserved_ident() && !self.token.is_ident_named(kw::Await) {
+            return;
+        }
+        if self.prev_token.is_reserved_ident() && self.prev_token.is_ident_named(kw::Await) {
+            // Likely `foo.await bar`
+        } else if self.prev_token.is_non_reserved_ident() {
+            // Likely `foo bar`
+        } else if self.prev_token.kind == token::Question {
+            // `foo? bar`
+        } else if self.prev_token.kind == token::CloseParen {
+            // `foo() bar`
+        } else {
+            return;
+        }
+        if self.token.span == self.prev_token.span {
+            // Account for syntax errors in proc-macros.
+            return;
+        }
+        if self.look_ahead(1, |t| [token::Semi, token::Question, token::Dot].contains(&t.kind)) {
+            err.span_suggestion_verbose(
+                self.prev_token.span.between(self.token.span),
+                "you might have meant to write a field access",
+                ".".to_string(),
+                Applicability::MaybeIncorrect,
+            );
+        }
+        if self.look_ahead(1, |t| t.kind == token::OpenParen) {
+            err.span_suggestion_verbose(
+                self.prev_token.span.between(self.token.span),
+                "you might have meant to write a method call",
+                ".".to_string(),
+                Applicability::MaybeIncorrect,
+            );
+        }
+    }
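+
+    // Sketch of the shapes this recovery targets (placeholder names):
+    //
+    //     foo bar;      // suggests `foo.bar;`     (field access)
+    //     foo() bar()   // suggests `foo().bar()`  (method call)
+    //
+    // The suggestion is only offered when the token after the identifier is `;`, `?`, `.`
+    // or `(`.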
+
+    /// Parses a statement, including the trailing semicolon.
+    pub fn parse_full_stmt(
+        &mut self,
+        recover: AttemptLocalParseRecovery,
+    ) -> PResult<'a, Option<Stmt>> {
+        // Skip looking for a trailing semicolon when we have a metavar seq.
+        if let Some(stmt) = self.eat_metavar_seq(MetaVarKind::Stmt, |this| {
+            // Why pass `true` for `force_full_expr`? Statement expressions are less expressive
+            // than "full" expressions, due to the `STMT_EXPR` restriction, and sometimes need
+            // parentheses. E.g. the "full" expression `match paren_around_match {} | true` when
+            // used in statement context must be written `(match paren_around_match {} | true)`.
+            // However, if the expression we are parsing in this statement context was pasted by a
+            // declarative macro, it may have come from a "full" expression context, and lack
+            // these parentheses. So we lift the `STMT_EXPR` restriction to ensure the statement
+            // will reparse successfully.
+            this.parse_stmt_without_recovery(false, ForceCollect::No, true)
+        }) {
+            let stmt = stmt.expect("an actual statement");
+            return Ok(Some(stmt));
+        }
+
+        let Some(mut stmt) = self.parse_stmt_without_recovery(true, ForceCollect::No, false)?
+        else {
+            return Ok(None);
+        };
+
+        let mut eat_semi = true;
+        let mut add_semi_to_stmt = false;
+
+        match &mut stmt.kind {
+            // Expression without semicolon.
+            StmtKind::Expr(expr)
+                if classify::expr_requires_semi_to_be_stmt(expr)
+                    && !expr.attrs.is_empty()
+                    && !matches!(self.token.kind, token::Eof | token::Semi | token::CloseBrace) =>
+            {
+                // The user has written `#[attr] expr` which is unsupported. (#106020)
+                let guar = self.attr_on_non_tail_expr(&expr);
+                // We already emitted an error, so don't emit another type error
+                let sp = expr.span.to(self.prev_token.span);
+                *expr = self.mk_expr_err(sp, guar);
+            }
+
+            // Expression without semicolon.
+            StmtKind::Expr(expr)
+                if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
+            {
+                // Just check for errors and recover; do not eat semicolon yet.
+
+                let expect_result =
+                    if let Err(e) = self.maybe_recover_from_ternary_operator(Some(expr.span)) {
+                        Err(e)
+                    } else {
+                        self.expect_one_of(&[], &[exp!(Semi), exp!(CloseBrace)])
+                    };
+
+                // Try to both emit a better diagnostic, and avoid further errors by replacing
+                // the `expr` with `ExprKind::Err`.
+                let replace_with_err = 'break_recover: {
+                    match expect_result {
+                        Ok(Recovered::No) => None,
+                        Ok(Recovered::Yes(guar)) => {
+                            // Skip type error to avoid extra errors.
+                            Some(guar)
+                        }
+                        Err(e) => {
+                            if self.recover_colon_as_semi() {
+                                // recover_colon_as_semi has already emitted a nicer error.
+                                e.delay_as_bug();
+                                add_semi_to_stmt = true;
+                                eat_semi = false;
+
+                                break 'break_recover None;
+                            }
+
+                            match &expr.kind {
+                                ExprKind::Path(None, ast::Path { segments, .. })
+                                    if let [segment] = segments.as_slice() =>
+                                {
+                                    if self.token == token::Colon
+                                        && self.look_ahead(1, |token| {
+                                            token.is_metavar_block()
+                                                || matches!(
+                                                    token.kind,
+                                                    token::Ident(
+                                                        kw::For | kw::Loop | kw::While,
+                                                        token::IdentIsRaw::No
+                                                    ) | token::OpenBrace
+                                                )
+                                        })
+                                    {
+                                        let snapshot = self.create_snapshot_for_diagnostic();
+                                        let label = Label {
+                                            ident: Ident::from_str_and_span(
+                                                &format!("'{}", segment.ident),
+                                                segment.ident.span,
+                                            ),
+                                        };
+                                        match self.parse_expr_labeled(label, false) {
+                                            Ok(labeled_expr) => {
+                                                e.cancel();
+                                                self.dcx().emit_err(MalformedLoopLabel {
+                                                    span: label.ident.span,
+                                                    suggestion: label.ident.span.shrink_to_lo(),
+                                                });
+                                                *expr = labeled_expr;
+                                                break 'break_recover None;
+                                            }
+                                            Err(err) => {
+                                                err.cancel();
+                                                self.restore_snapshot(snapshot);
+                                            }
+                                        }
+                                    }
+                                }
+                                _ => {}
+                            }
+
+                            let res =
+                                self.check_mistyped_turbofish_with_multiple_type_params(e, expr);
+
+                            Some(if recover.no() {
+                                res?
+                            } else {
+                                res.unwrap_or_else(|mut e| {
+                                    self.recover_missing_dot(&mut e);
+                                    let guar = e.emit();
+                                    self.recover_stmt();
+                                    guar
+                                })
+                            })
+                        }
+                    }
+                };
+
+                if let Some(guar) = replace_with_err {
+                    // We already emitted an error, so don't emit another type error
+                    let sp = expr.span.to(self.prev_token.span);
+                    *expr = self.mk_expr_err(sp, guar);
+                }
+            }
+            StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
+            StmtKind::Let(local) if let Err(mut e) = self.expect_semi() => {
+                // We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
+                match &mut local.kind {
+                    LocalKind::Init(expr) | LocalKind::InitElse(expr, _) => {
+                        self.check_mistyped_turbofish_with_multiple_type_params(e, expr).map_err(
+                            |mut e| {
+                                self.recover_missing_dot(&mut e);
+                                e
+                            },
+                        )?;
+                        // We found `foo<bar, baz>`, have we fully recovered?
+                        self.expect_semi()?;
+                    }
+                    LocalKind::Decl => {
+                        if let Some(colon_sp) = local.colon_sp {
+                            e.span_label(
+                                colon_sp,
+                                format!(
+                                    "while parsing the type for {}",
+                                    local.pat.descr().map_or_else(
+                                        || "the binding".to_string(),
+                                        |n| format!("`{n}`")
+                                    )
+                                ),
+                            );
+                            let suggest_eq = if self.token == token::Dot
+                                && let _ = self.bump()
+                                && let mut snapshot = self.create_snapshot_for_diagnostic()
+                                && let Ok(_) = snapshot
+                                    .parse_dot_suffix_expr(
+                                        colon_sp,
+                                        self.mk_expr_err(
+                                            colon_sp,
+                                            self.dcx()
+                                                .delayed_bug("error during `:` -> `=` recovery"),
+                                        ),
+                                    )
+                                    .map_err(Diag::cancel)
+                            {
+                                true
+                            } else if let Some(op) = self.check_assoc_op()
+                                && op.node.can_continue_expr_unambiguously()
+                            {
+                                true
+                            } else {
+                                false
+                            };
+                            if suggest_eq {
+                                e.span_suggestion_short(
+                                    colon_sp,
+                                    "use `=` if you meant to assign",
+                                    "=",
+                                    Applicability::MaybeIncorrect,
+                                );
+                            }
+                        }
+                        return Err(e);
+                    }
+                }
+                eat_semi = false;
+            }
+            StmtKind::Empty | StmtKind::Item(_) | StmtKind::Let(_) | StmtKind::Semi(_) => {
+                eat_semi = false
+            }
+        }
+
+        if add_semi_to_stmt || (eat_semi && self.eat(exp!(Semi))) {
+            stmt = stmt.add_trailing_semicolon();
+        }
+
+        stmt.span = stmt.span.to(self.prev_token.span);
+        Ok(Some(stmt))
+    }
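+
+    // One recovery above worth spelling out (placeholder label name): a loop label written
+    // without the leading tick, e.g. `label: loop { break; }`, is reparsed as
+    // `'label: loop { break; }` and a `MalformedLoopLabel` error suggests adding the
+    // missing `'`.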
+
+    pub(super) fn mk_block(
+        &self,
+        stmts: ThinVec<Stmt>,
+        rules: BlockCheckMode,
+        span: Span,
+    ) -> P<Block> {
+        P(Block { stmts, id: DUMMY_NODE_ID, rules, span, tokens: None })
+    }
+
+    pub(super) fn mk_stmt(&self, span: Span, kind: StmtKind) -> Stmt {
+        Stmt { id: DUMMY_NODE_ID, kind, span }
+    }
+
+    pub(super) fn mk_stmt_err(&self, span: Span, guar: ErrorGuaranteed) -> Stmt {
+        self.mk_stmt(span, StmtKind::Expr(self.mk_expr_err(span, guar)))
+    }
+
+    pub(super) fn mk_block_err(&self, span: Span, guar: ErrorGuaranteed) -> P<Block> {
+        self.mk_block(thin_vec![self.mk_stmt_err(span, guar)], BlockCheckMode::Default, span)
+    }
+}
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
new file mode 100644
index 00000000000..43a1d779a75
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -0,0 +1,2935 @@
+#![allow(rustc::symbol_intern_string_literal)]
+
+use std::assert_matches::assert_matches;
+use std::io::prelude::*;
+use std::iter::Peekable;
+use std::path::{Path, PathBuf};
+use std::sync::{Arc, Mutex};
+use std::{io, str};
+
+use ast::token::IdentIsRaw;
+use rustc_ast::ptr::P;
+use rustc_ast::token::{self, Delimiter, Token};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast::{self as ast, PatKind, visit};
+use rustc_ast_pretty::pprust::item_to_string;
+use rustc_errors::emitter::{HumanEmitter, OutputTheme};
+use rustc_errors::translation::Translator;
+use rustc_errors::{DiagCtxt, MultiSpan, PResult};
+use rustc_session::parse::ParseSess;
+use rustc_span::source_map::{FilePathMapping, SourceMap};
+use rustc_span::{
+    BytePos, FileName, Pos, Span, Symbol, create_default_session_globals_then, kw, sym,
+};
+use termcolor::WriteColor;
+
+use crate::parser::{ForceCollect, Parser};
+use crate::{new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
+
+fn psess() -> ParseSess {
+    ParseSess::new(vec![crate::DEFAULT_LOCALE_RESOURCE])
+}
+
+/// Maps a string to a parser (via tts).
+fn string_to_parser(psess: &ParseSess, source_str: String) -> Parser<'_> {
+    unwrap_or_emit_fatal(new_parser_from_source_str(
+        psess,
+        PathBuf::from("bogofile").into(),
+        source_str,
+    ))
+}
+
+fn create_test_handler(theme: OutputTheme) -> (DiagCtxt, Arc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
+    let output = Arc::new(Mutex::new(Vec::new()));
+    let source_map = Arc::new(SourceMap::new(FilePathMapping::empty()));
+    let translator = Translator::with_fallback_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false);
+    let mut emitter = HumanEmitter::new(Box::new(Shared { data: output.clone() }), translator)
+        .sm(Some(source_map.clone()))
+        .diagnostic_width(Some(140));
+    emitter = emitter.theme(theme);
+    let dcx = DiagCtxt::new(Box::new(emitter));
+    (dcx, source_map, output)
+}
+
+/// Returns the result of parsing the given string via the given callback.
+///
+/// If there are any errors, this will panic.
+fn with_error_checking_parse<'a, T, F>(s: String, psess: &'a ParseSess, f: F) -> T
+where
+    F: FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
+{
+    let mut p = string_to_parser(&psess, s);
+    let x = f(&mut p).unwrap();
+    p.dcx().abort_if_errors();
+    x
+}
+
+/// Verifies that parsing the given string using the given callback will
+/// generate an error that contains the given text.
+fn with_expected_parse_error<T, F>(source_str: &str, expected_output: &str, f: F)
+where
+    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
+{
+    let (handler, source_map, output) = create_test_handler(OutputTheme::Ascii);
+    let psess = ParseSess::with_dcx(handler, source_map);
+    let mut p = string_to_parser(&psess, source_str.to_string());
+    let result = f(&mut p);
+    assert!(result.is_ok());
+
+    let bytes = output.lock().unwrap();
+    let actual_output = str::from_utf8(&bytes).unwrap();
+    println!("expected output:\n------\n{}------", expected_output);
+    println!("actual output:\n------\n{}------", actual_output);
+
+    assert!(actual_output.contains(expected_output))
+}
+
+/// Maps a string to tts, using a made-up filename.
+pub(crate) fn string_to_stream(source_str: String) -> TokenStream {
+    let psess = psess();
+    unwrap_or_emit_fatal(source_str_to_stream(
+        &psess,
+        PathBuf::from("bogofile").into(),
+        source_str,
+        None,
+    ))
+}
+
+/// Does the given string match the pattern? Whitespace in the first string
+/// may be deleted or replaced with other whitespace to match the pattern.
+/// This function is relatively Unicode-ignorant; fortunately, the careful design
+/// of UTF-8 mitigates this ignorance. It doesn't do Unicode normalization (e.g. NFKC).
+pub(crate) fn matches_codepattern(a: &str, b: &str) -> bool {
+    let mut a_iter = a.chars().peekable();
+    let mut b_iter = b.chars().peekable();
+
+    loop {
+        let (a, b) = match (a_iter.peek(), b_iter.peek()) {
+            (None, None) => return true,
+            (None, _) => return false,
+            (Some(&a), None) => {
+                if rustc_lexer::is_whitespace(a) {
+                    break; // Trailing whitespace check is out of loop for borrowck.
+                } else {
+                    return false;
+                }
+            }
+            (Some(&a), Some(&b)) => (a, b),
+        };
+
+        if rustc_lexer::is_whitespace(a) && rustc_lexer::is_whitespace(b) {
+            // Skip whitespace for `a` and `b`.
+            scan_for_non_ws_or_end(&mut a_iter);
+            scan_for_non_ws_or_end(&mut b_iter);
+        } else if rustc_lexer::is_whitespace(a) {
+            // Skip whitespace for `a`.
+            scan_for_non_ws_or_end(&mut a_iter);
+        } else if a == b {
+            a_iter.next();
+            b_iter.next();
+        } else {
+            return false;
+        }
+    }
+
+    // Check if `a` has *only* trailing whitespace.
+    a_iter.all(rustc_lexer::is_whitespace)
+}
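+
+// Usage sketch (hypothetical inputs): whitespace in the first argument may shrink or
+// disappear, but whitespace that only the pattern has does not match:
+//
+//     assert!(matches_codepattern("fn  foo ( )", "fn foo()"));
+//     assert!(!matches_codepattern("fnfoo()", "fn foo()"));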
+
+/// Advances the given peekable `Iterator` until it reaches a non-whitespace character.
+fn scan_for_non_ws_or_end<I: Iterator<Item = char>>(iter: &mut Peekable<I>) {
+    while iter.peek().copied().is_some_and(rustc_lexer::is_whitespace) {
+        iter.next();
+    }
+}
+
+/// Identifies a position in the text by the n'th occurrence of a string.
+struct Position {
+    string: &'static str,
+    count: usize,
+}
+
+struct SpanLabel {
+    start: Position,
+    end: Position,
+    label: &'static str,
+}
+
+struct Shared<T: Write> {
+    data: Arc<Mutex<T>>,
+}
+
+impl<T: Write> WriteColor for Shared<T> {
+    fn supports_color(&self) -> bool {
+        false
+    }
+
+    fn set_color(&mut self, _spec: &termcolor::ColorSpec) -> io::Result<()> {
+        Ok(())
+    }
+
+    fn reset(&mut self) -> io::Result<()> {
+        Ok(())
+    }
+}
+
+impl<T: Write> Write for Shared<T> {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.data.lock().unwrap().write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.data.lock().unwrap().flush()
+    }
+}
+
+#[allow(rustc::untranslatable_diagnostic)] // no translation needed for tests
+fn test_harness(
+    file_text: &str,
+    span_labels: Vec<SpanLabel>,
+    notes: Vec<(Option<(Position, Position)>, &'static str)>,
+    expected_output_ascii: &str,
+    expected_output_unicode: &str,
+) {
+    create_default_session_globals_then(|| {
+        for (theme, expected_output) in [
+            (OutputTheme::Ascii, expected_output_ascii),
+            (OutputTheme::Unicode, expected_output_unicode),
+        ] {
+            let (dcx, source_map, output) = create_test_handler(theme);
+            source_map
+                .new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
+
+            let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
+            let mut msp = MultiSpan::from_span(primary_span);
+            for span_label in &span_labels {
+                let span = make_span(&file_text, &span_label.start, &span_label.end);
+                msp.push_span_label(span, span_label.label);
+                println!("span: {:?} label: {:?}", span, span_label.label);
+                println!("text: {:?}", source_map.span_to_snippet(span));
+            }
+
+            let mut err = dcx.handle().struct_span_err(msp, "foo");
+            for (position, note) in &notes {
+                if let Some((start, end)) = position {
+                    let span = make_span(&file_text, &start, &end);
+                    err.span_note(span, *note);
+                } else {
+                    err.note(*note);
+                }
+            }
+            err.emit();
+
+            assert!(
+                expected_output.chars().next() == Some('\n'),
+                "expected output should begin with newline"
+            );
+            let expected_output = &expected_output[1..];
+
+            let bytes = output.lock().unwrap();
+            let actual_output = str::from_utf8(&bytes).unwrap();
+            println!("expected output:\n------\n{}------", expected_output);
+            println!("actual output:\n------\n{}------", actual_output);
+
+            assert!(expected_output == actual_output)
+        }
+    })
+}
+
+fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {
+    let start = make_pos(file_text, start);
+    let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends
+    assert!(start <= end);
+    Span::with_root_ctxt(BytePos(start as u32), BytePos(end as u32))
+}
+
+fn make_pos(file_text: &str, pos: &Position) -> usize {
+    let mut remainder = file_text;
+    let mut offset = 0;
+    for _ in 0..pos.count {
+        if let Some(n) = remainder.find(&pos.string) {
+            offset += n;
+            remainder = &remainder[n + 1..];
+        } else {
+            panic!("failed to find {} instances of {:?} in {:?}", pos.count, pos.string, file_text);
+        }
+    }
+    offset
+}
+
+#[test]
+fn ends_on_col0() {
+    test_harness(
+        r#"
+fn foo() {
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "{", count: 1 },
+            end: Position { string: "}", count: 1 },
+            label: "test",
+        }],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:2:10
+  |
+2 |   fn foo() {
+  |  __________^
+3 | | }
+  | |_^ test
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:2:10
+  │
+2 │   fn foo() {
+  │ ┏━━━━━━━━━━┛
+3 │ ┃ }
+  ╰╴┗━┛ test
+
+"#,
+    );
+}
+
+#[test]
+fn ends_on_col2() {
+    test_harness(
+        r#"
+fn foo() {
+
+
+  }
+"#,
+        vec![SpanLabel {
+            start: Position { string: "{", count: 1 },
+            end: Position { string: "}", count: 1 },
+            label: "test",
+        }],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:2:10
+  |
+2 |   fn foo() {
+  |  __________^
+... |
+5 | |   }
+  | |___^ test
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:2:10
+  │
+2 │   fn foo() {
+  │ ┏━━━━━━━━━━┛
+  ‡ ┃
+5 │ ┃   }
+  ╰╴┗━━━┛ test
+
+"#,
+    );
+}
+#[test]
+fn non_nested() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0
+  X1 Y1
+  X2 Y2
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "Y2", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |      X0 Y0
+  |  ____^  -
+  | | ______|
+4 | ||   X1 Y1
+5 | ||   X2 Y2
+  | ||____^__- `Y` is a good letter too
+  | |_____|
+  |       `X` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │      X0 Y0
+  │ ┏━━━━┛  │
+  │ ┃┌──────┘
+4 │ ┃│   X1 Y1
+5 │ ┃│   X2 Y2
+  │ ┃└────╿──┘ `Y` is a good letter too
+  │ ┗━━━━━┥
+  ╰╴      `X` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn nested() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0
+  Y1 X1
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "Y1", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |      X0 Y0
+  |  ____^  -
+  | | ______|
+4 | ||   Y1 X1
+  | ||____-__^ `X` is a good letter
+  |  |____|
+  |       `Y` is a good letter too
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │      X0 Y0
+  │ ┏━━━━┛  │
+  │ ┃┌──────┘
+4 │ ┃│   Y1 X1
+  │ ┗│━━━━│━━┛ `X` is a good letter
+  │  └────┤
+  ╰╴      `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn multiline_and_normal_overlap() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "Y0", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |     X0 Y0 Z0
+  |  ___---^-
+  | |   |
+  | |   `Y` is a good letter too
+4 | |   X1 Y1 Z1
+5 | |   X2 Y2 Z2
+  | |____^ `X` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │     X0 Y0 Z0
+  │ ┏━━━┬──┛─
+  │ ┃   │
+  │ ┃   `Y` is a good letter too
+4 │ ┃   X1 Y1 Z1
+5 │ ┃   X2 Y2 Z2
+  ╰╴┗━━━━┛ `X` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn different_overlap() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Z1", count: 1 },
+                end: Position { string: "X3", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |      X0 Y0 Z0
+  |  _______^
+4 | |    X1 Y1 Z1
+  | | _________-
+5 | ||   X2 Y2 Z2
+  | ||____^ `X` is a good letter
+6 |  |   X3 Y3 Z3
+  |  |____- `Y` is a good letter too
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │      X0 Y0 Z0
+  │ ┏━━━━━━━┛
+4 │ ┃    X1 Y1 Z1
+  │ ┃┌─────────┘
+5 │ ┃│   X2 Y2 Z2
+  │ ┗│━━━━┛ `X` is a good letter
+6 │  │   X3 Y3 Z3
+  ╰╴ └────┘ `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_1() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![(None, "bar")],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+  = note: bar
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  │
+  ╰ note: bar
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_2() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![(None, "bar"), (None, "qux")],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+  = note: bar
+  = note: qux
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  │
+  ├ note: bar
+  ╰ note: qux
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_3() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![(None, "bar"), (None, "baz"), (None, "qux")],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+  = note: bar
+  = note: baz
+  = note: qux
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  │
+  ├ note: bar
+  ├ note: baz
+  ╰ note: qux
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_1() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![(
+            Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+            "bar",
+        )],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+note: bar
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  ╰╴
+note: bar
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_2() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "bar",
+            ),
+            (
+                Some((Position { string: "X2", count: 1 }, Position { string: "Y2", count: 1 })),
+                "qux",
+            ),
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+note: bar
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+note: qux
+ --> test.rs:5:3
+  |
+5 |   X2 Y2 Z2
+  |   ^^^^^
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  ╰╴
+note: bar
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+note: qux
+  ╭▸ test.rs:5:3
+  │
+5 │   X2 Y2 Z2
+  ╰╴  ━━━━━
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_3() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "bar",
+            ),
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "baz",
+            ),
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "qux",
+            ),
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+note: bar
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+note: baz
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+note: qux
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  ╰╴
+note: bar
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+note: baz
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+note: qux
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_4() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "bar",
+            ),
+            (None, "qux"),
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+note: bar
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+  = note: qux
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  ╰╴
+note: bar
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  │   ━━━━━━━━
+  ╰ note: qux
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_5() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![
+            (None, "bar"),
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "qux",
+            ),
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+  = note: bar
+note: qux
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  │
+  ╰ note: bar
+note: qux
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_6() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![
+            (None, "bar"),
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "baz",
+            ),
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "qux",
+            ),
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+  = note: bar
+note: baz
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+note: qux
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  │
+  ╰ note: bar
+note: baz
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+note: qux
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_7() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z3", count: 1 })),
+                "bar",
+            ),
+            (None, "baz"),
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "qux",
+            ),
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+note: bar
+ --> test.rs:4:3
+  |
+4 | /   X1 Y1 Z1
+5 | |   X2 Y2 Z2
+6 | |   X3 Y3 Z3
+  | |__________^
+  = note: baz
+note: qux
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  ╰╴
+note: bar
+  ╭▸ test.rs:4:3
+  │
+4 │ ┏   X1 Y1 Z1
+5 │ ┃   X2 Y2 Z2
+6 │ ┃   X3 Y3 Z3
+  │ ┗━━━━━━━━━━┛
+  ╰ note: baz
+note: qux
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_8() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "bar",
+            ),
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "baz",
+            ),
+            (None, "qux"),
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+note: bar
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+note: baz
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+  = note: qux
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  ╰╴
+note: bar
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+note: baz
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  │   ━━━━━━━━
+  ╰ note: qux
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_9() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![
+            (None, "bar"),
+            (None, "baz"),
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "qux",
+            ),
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+  = note: bar
+  = note: baz
+note: qux
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  │
+  ├ note: bar
+  ╰ note: baz
+note: qux
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  ╰╴  ━━━━━━━━
+
+"#,
+    );
+}
+
+#[test]
+fn different_note_spanned_10() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "Y0", count: 1 },
+            end: Position { string: "Z0", count: 1 },
+            label: "`X` is a good letter",
+        }],
+        vec![
+            (
+                Some((Position { string: "X1", count: 1 }, Position { string: "Z1", count: 1 })),
+                "bar",
+            ),
+            (None, "baz"),
+            (None, "qux"),
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |   X0 Y0 Z0
+  |      ^^^^^ `X` is a good letter
+  |
+note: bar
+ --> test.rs:4:3
+  |
+4 |   X1 Y1 Z1
+  |   ^^^^^^^^
+  = note: baz
+  = note: qux
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │   X0 Y0 Z0
+  │      ━━━━━ `X` is a good letter
+  ╰╴
+note: bar
+  ╭▸ test.rs:4:3
+  │
+4 │   X1 Y1 Z1
+  │   ━━━━━━━━
+  ├ note: baz
+  ╰ note: qux
+
+"#,
+    );
+}
+
+#[test]
+fn triple_overlap() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "Y2", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+            SpanLabel {
+                start: Position { string: "Z0", count: 1 },
+                end: Position { string: "Z2", count: 1 },
+                label: "`Z` label",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |       X0 Y0 Z0
+  |  _____^  -  -
+  | | _______|  |
+  | || _________|
+4 | |||   X1 Y1 Z1
+5 | |||   X2 Y2 Z2
+  | |||____^__-__- `Z` label
+  | ||_____|__|
+  | |______|  `Y` is a good letter too
+  |        `X` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │       X0 Y0 Z0
+  │ ┏━━━━━┛  │  │
+  │ ┃┌───────┘  │
+  │ ┃│┌─────────┘
+4 │ ┃││   X1 Y1 Z1
+5 │ ┃││   X2 Y2 Z2
+  │ ┃│└────╿──│──┘ `Z` label
+  │ ┃└─────│──┤
+  │ ┗━━━━━━┥  `Y` is a good letter too
+  ╰╴       `X` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn triple_exact_overlap() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`Z` label",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 | /   X0 Y0 Z0
+4 | |   X1 Y1 Z1
+5 | |   X2 Y2 Z2
+  | |    ^
+  | |    |
+  | |    `X` is a good letter
+  | |____`Y` is a good letter too
+  |      `Z` label
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │ ┏   X0 Y0 Z0
+4 │ ┃   X1 Y1 Z1
+5 │ ┃   X2 Y2 Z2
+  │ ┃    ╿
+  │ ┃    │
+  │ ┃    `X` is a good letter
+  │ ┗━━━━`Y` is a good letter too
+  ╰╴     `Z` label
+
+"#,
+    );
+}
+
+#[test]
+fn minimum_depth() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y1", count: 1 },
+                end: Position { string: "Z2", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+            SpanLabel {
+                start: Position { string: "X2", count: 1 },
+                end: Position { string: "Y3", count: 1 },
+                label: "`Z`",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |      X0 Y0 Z0
+  |  _______^
+4 | |    X1 Y1 Z1
+  | | ____^_-
+  | ||____|
+  |  |    `X` is a good letter
+5 |  |   X2 Y2 Z2
+  |  |___-______- `Y` is a good letter too
+  |   ___|
+  |  |
+6 |  |   X3 Y3 Z3
+  |  |_______- `Z`
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │      X0 Y0 Z0
+  │ ┏━━━━━━━┛
+4 │ ┃    X1 Y1 Z1
+  │ ┃┌────╿─┘
+  │ ┗│━━━━┥
+  │  │    `X` is a good letter
+5 │  │   X2 Y2 Z2
+  │  └───│──────┘ `Y` is a good letter too
+  │  ┌───┘
+  │  │
+6 │  │   X3 Y3 Z3
+  ╰╴ └───────┘ `Z`
+
+"#,
+    );
+}
+
+#[test]
+fn non_overlapping() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y2", count: 1 },
+                end: Position { string: "Z3", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 | /   X0 Y0 Z0
+4 | |   X1 Y1 Z1
+  | |____^ `X` is a good letter
+5 |     X2 Y2 Z2
+  |  ______-
+6 | |   X3 Y3 Z3
+  | |__________- `Y` is a good letter too
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │ ┏   X0 Y0 Z0
+4 │ ┃   X1 Y1 Z1
+  │ ┗━━━━┛ `X` is a good letter
+5 │     X2 Y2 Z2
+  │ ┌──────┘
+6 │ │   X3 Y3 Z3
+  ╰╴└──────────┘ `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn overlapping_start_and_end() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Z1", count: 1 },
+                end: Position { string: "Z3", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |      X0 Y0 Z0
+  |  _______^
+4 | |    X1 Y1 Z1
+  | | ____^____-
+  | ||____|
+  |  |    `X` is a good letter
+5 |  |   X2 Y2 Z2
+6 |  |   X3 Y3 Z3
+  |  |__________- `Y` is a good letter too
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:6
+  │
+3 │      X0 Y0 Z0
+  │ ┏━━━━━━━┛
+4 │ ┃    X1 Y1 Z1
+  │ ┃┌────╿────┘
+  │ ┗│━━━━┥
+  │  │    `X` is a good letter
+5 │  │   X2 Y2 Z2
+6 │  │   X3 Y3 Z3
+  ╰╴ └──────────┘ `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_primary_without_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "`a` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "c", count: 1 },
+                end: Position { string: "c", count: 1 },
+                label: "",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:7
+  |
+3 |   a { b { c } d }
+  |   ----^^^^-^^-- `a` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:7
+  │
+3 │   a { b { c } d }
+  ╰╴  ────━━━━─━━── `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiline_notes() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "a", count: 1 },
+            end: Position { string: "d", count: 1 },
+            label: "`a` is a good letter",
+        }],
+        vec![(None, "foo\nbar"), (None, "foo\nbar")],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^^^^^^^^^^ `a` is a good letter
+  |
+  = note: foo
+          bar
+  = note: foo
+          bar
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │   a { b { c } d }
+  │   ━━━━━━━━━━━━━ `a` is a good letter
+  │
+  ├ note: foo
+  │       bar
+  ╰ note: foo
+          bar
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_secondary_without_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "`a` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^-------^^ `a` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │   a { b { c } d }
+  ╰╴  ━━━━───────━━ `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_primary_without_message_2() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "`b` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "c", count: 1 },
+                end: Position { string: "c", count: 1 },
+                label: "",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:7
+  |
+3 |   a { b { c } d }
+  |   ----^^^^-^^--
+  |       |
+  |       `b` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:7
+  │
+3 │   a { b { c } d }
+  │   ────┯━━━─━━──
+  │       │
+  ╰╴      `b` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_secondary_without_message_2() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "`b` is a good letter",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^-------^^
+  |       |
+  |       `b` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │   a { b { c } d }
+  │   ━━━━┬──────━━
+  │       │
+  ╰╴      `b` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_secondary_without_message_3() {
+    test_harness(
+        r#"
+fn foo() {
+  a  bc  d
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "b", count: 1 },
+                label: "`a` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "c", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a  bc  d
+  |   ^^^^----
+  |   |
+  |   `a` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │   a  bc  d
+  │   ┯━━━────
+  │   │
+  ╰╴  `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_without_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^-------^^
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │   a { b { c } d }
+  ╰╴  ━━━━───────━━
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_without_message_2() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "c", count: 1 },
+                end: Position { string: "c", count: 1 },
+                label: "",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:7
+  |
+3 |   a { b { c } d }
+  |   ----^^^^-^^--
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:7
+  │
+3 │   a { b { c } d }
+  ╰╴  ────━━━━─━━──
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_with_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "`a` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "`b` is a good letter",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^-------^^
+  |   |   |
+  |   |   `b` is a good letter
+  |   `a` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │   a { b { c } d }
+  │   ┯━━━┬──────━━
+  │   │   │
+  │   │   `b` is a good letter
+  ╰╴  `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn single_label_with_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "a", count: 1 },
+            end: Position { string: "d", count: 1 },
+            label: "`a` is a good letter",
+        }],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^^^^^^^^^^ `a` is a good letter
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │   a { b { c } d }
+  ╰╴  ━━━━━━━━━━━━━ `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn single_label_without_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "a", count: 1 },
+            end: Position { string: "d", count: 1 },
+            label: "",
+        }],
+        vec![],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^^^^^^^^^^
+
+"#,
+        r#"
+error: foo
+  ╭▸ test.rs:3:3
+  │
+3 │   a { b { c } d }
+  ╰╴  ━━━━━━━━━━━━━
+
+"#,
+    );
+}
+
+#[test]
+fn long_snippet() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Z1", count: 1 },
+                end: Position { string: "Z3", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+  --> test.rs:3:6
+   |
+ 3 |      X0 Y0 Z0
+   |  _______^
+ 4 | |    X1 Y1 Z1
+   | | ____^____-
+   | ||____|
+   |  |    `X` is a good letter
+ 5 |  | 1
+ 6 |  | 2
+ 7 |  | 3
+...   |
+15 |  |   X2 Y2 Z2
+16 |  |   X3 Y3 Z3
+   |  |__________- `Y` is a good letter too
+
+"#,
+        r#"
+error: foo
+   ╭▸ test.rs:3:6
+   │
+ 3 │      X0 Y0 Z0
+   │ ┏━━━━━━━┛
+ 4 │ ┃    X1 Y1 Z1
+   │ ┃┌────╿────┘
+   │ ┗│━━━━┥
+   │  │    `X` is a good letter
+ 5 │  │ 1
+ 6 │  │ 2
+ 7 │  │ 3
+   ‡  │
+15 │  │   X2 Y2 Z2
+16 │  │   X3 Y3 Z3
+   ╰╴ └──────────┘ `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn long_snippet_multiple_spans() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+1
+2
+3
+  X1 Y1 Z1
+4
+5
+6
+  X2 Y2 Z2
+7
+8
+9
+10
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "Y3", count: 1 },
+                label: "`Y` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Z1", count: 1 },
+                end: Position { string: "Z2", count: 1 },
+                label: "`Z` is a good letter too",
+            },
+        ],
+        vec![],
+        r#"
+error: foo
+  --> test.rs:3:6
+   |
+ 3 |      X0 Y0 Z0
+   |  _______^
+ 4 | |  1
+ 5 | |  2
+ 6 | |  3
+ 7 | |    X1 Y1 Z1
+   | | _________-
+ 8 | || 4
+ 9 | || 5
+10 | || 6
+11 | ||   X2 Y2 Z2
+   | ||__________- `Z` is a good letter too
+...  |
+15 | |  10
+16 | |    X3 Y3 Z3
+   | |________^ `Y` is a good letter
+
+"#,
+        r#"
+error: foo
+   ╭▸ test.rs:3:6
+   │
+ 3 │      X0 Y0 Z0
+   │ ┏━━━━━━━┛
+ 4 │ ┃  1
+ 5 │ ┃  2
+ 6 │ ┃  3
+ 7 │ ┃    X1 Y1 Z1
+   │ ┃┌─────────┘
+ 8 │ ┃│ 4
+ 9 │ ┃│ 5
+10 │ ┃│ 6
+11 │ ┃│   X2 Y2 Z2
+   │ ┃└──────────┘ `Z` is a good letter too
+   ‡ ┃
+15 │ ┃  10
+16 │ ┃    X3 Y3 Z3
+   ╰╴┗━━━━━━━━┛ `Y` is a good letter
+
+"#,
+    );
+}
+
+/// Parses an item.
+///
+/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
+/// when a syntax error occurred.
+fn parse_item_from_source_str(
+    name: FileName,
+    source: String,
+    psess: &ParseSess,
+) -> PResult<'_, Option<P<ast::Item>>> {
+    unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, source))
+        .parse_item(ForceCollect::No)
+}
+
+/// Produces a `rustc_span::Span`.
+fn sp(a: u32, b: u32) -> Span {
+    Span::with_root_ctxt(BytePos(a), BytePos(b))
+}
+
+/// Parses a string, returns an expression.
+fn string_to_expr(source_str: String) -> P<ast::Expr> {
+    with_error_checking_parse(source_str, &psess(), |p| p.parse_expr())
+}
+
+/// Parses a string, returns an item.
+fn string_to_item(source_str: String) -> Option<P<ast::Item>> {
+    with_error_checking_parse(source_str, &psess(), |p| p.parse_item(ForceCollect::No))
+}
+
+#[test]
+fn bad_path_expr_1() {
+    // This should trigger the error: expected identifier, found keyword `return`.
+    create_default_session_globals_then(|| {
+        with_expected_parse_error(
+            "::abc::def::return",
+            "expected identifier, found keyword `return`",
+            |p| p.parse_expr(),
+        );
+    })
+}
+
+// Checks the token-tree-ization of macros.
+#[test]
+fn string_to_tts_macro() {
+    create_default_session_globals_then(|| {
+        let stream = string_to_stream("macro_rules! zip (($a)=>($a))".to_string());
+        let tts = &stream.iter().collect::<Vec<_>>()[..];
+
+        match tts {
+            [
+                TokenTree::Token(
+                    Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
+                    _,
+                ),
+                TokenTree::Token(Token { kind: token::Bang, .. }, _),
+                TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
+                TokenTree::Delimited(.., macro_delim, macro_tts),
+            ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
+                let tts = &macro_tts.iter().collect::<Vec<_>>();
+                match &tts[..] {
+                    [
+                        TokenTree::Delimited(.., first_delim, first_tts),
+                        TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
+                        TokenTree::Delimited(.., second_delim, second_tts),
+                    ] if macro_delim == &Delimiter::Parenthesis => {
+                        let tts = &first_tts.iter().collect::<Vec<_>>();
+                        match &tts[..] {
+                            [
+                                TokenTree::Token(Token { kind: token::Dollar, .. }, _),
+                                TokenTree::Token(
+                                    Token { kind: token::Ident(name, IdentIsRaw::No), .. },
+                                    _,
+                                ),
+                            ] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {
+                            }
+                            _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
+                        }
+                        let tts = &second_tts.iter().collect::<Vec<_>>();
+                        match &tts[..] {
+                            [
+                                TokenTree::Token(Token { kind: token::Dollar, .. }, _),
+                                TokenTree::Token(
+                                    Token { kind: token::Ident(name, IdentIsRaw::No), .. },
+                                    _,
+                                ),
+                            ] if second_delim == &Delimiter::Parenthesis
+                                && name.as_str() == "a" => {}
+                            _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
+                        }
+                    }
+                    _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
+                }
+            }
+            _ => panic!("value: {:?}", tts),
+        }
+    })
+}
+
+#[test]
+fn string_to_tts_1() {
+    create_default_session_globals_then(|| {
+        let tts = string_to_stream("fn a(b: i32) { b; }".to_string());
+
+        let expected = TokenStream::new(vec![
+            TokenTree::token_alone(token::Ident(kw::Fn, IdentIsRaw::No), sp(0, 2)),
+            TokenTree::token_joint_hidden(
+                token::Ident(Symbol::intern("a"), IdentIsRaw::No),
+                sp(3, 4),
+            ),
+            TokenTree::Delimited(
+                DelimSpan::from_pair(sp(4, 5), sp(11, 12)),
+                // `JointHidden` because the `(` is followed immediately by
+                // `b`, `Alone` because the `)` is followed by whitespace.
+                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
+                Delimiter::Parenthesis,
+                TokenStream::new(vec![
+                    TokenTree::token_joint(
+                        token::Ident(Symbol::intern("b"), IdentIsRaw::No),
+                        sp(5, 6),
+                    ),
+                    TokenTree::token_alone(token::Colon, sp(6, 7)),
+                    // `JointHidden` because the `i32` is immediately followed by the `)`.
+                    TokenTree::token_joint_hidden(
+                        token::Ident(sym::i32, IdentIsRaw::No),
+                        sp(8, 11),
+                    ),
+                ]),
+            ),
+            TokenTree::Delimited(
+                DelimSpan::from_pair(sp(13, 14), sp(18, 19)),
+                // First `Alone` because the `{` is followed by whitespace,
+                // second `Alone` because the `}` is followed immediately by
+                // EOF.
+                DelimSpacing::new(Spacing::Alone, Spacing::Alone),
+                Delimiter::Brace,
+                TokenStream::new(vec![
+                    TokenTree::token_joint(
+                        token::Ident(Symbol::intern("b"), IdentIsRaw::No),
+                        sp(15, 16),
+                    ),
+                    // `Alone` because the `;` is followed by whitespace.
+                    TokenTree::token_alone(token::Semi, sp(16, 17)),
+                ]),
+            ),
+        ]);
+
+        assert_eq!(tts, expected);
+    })
+}
+
+#[test]
+fn parse_use() {
+    create_default_session_globals_then(|| {
+        let use_s = "use foo::bar::baz;";
+        let vitem = string_to_item(use_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], use_s);
+
+        let use_s = "use foo::bar as baz;";
+        let vitem = string_to_item(use_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], use_s);
+    })
+}
+
+#[test]
+fn parse_extern_crate() {
+    create_default_session_globals_then(|| {
+        let ex_s = "extern crate foo;";
+        let vitem = string_to_item(ex_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], ex_s);
+
+        let ex_s = "extern crate foo as bar;";
+        let vitem = string_to_item(ex_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], ex_s);
+    })
+}
+
+fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
+    let item = string_to_item(src.to_string()).unwrap();
+
+    struct PatIdentVisitor {
+        spans: Vec<Span>,
+    }
+    impl<'a> visit::Visitor<'a> for PatIdentVisitor {
+        fn visit_pat(&mut self, p: &'a ast::Pat) {
+            match &p.kind {
+                PatKind::Ident(_, ident, _) => {
+                    self.spans.push(ident.span);
+                }
+                _ => {
+                    visit::walk_pat(self, p);
+                }
+            }
+        }
+    }
+    let mut v = PatIdentVisitor { spans: Vec::new() };
+    visit::walk_item(&mut v, &item);
+    v.spans
+}
+
+#[test]
+fn span_of_self_arg_pat_idents_are_correct() {
+    create_default_session_globals_then(|| {
+        let srcs = [
+            "impl z { fn a (&self, &myarg: i32) {} }",
+            "impl z { fn a (&mut self, &myarg: i32) {} }",
+            "impl z { fn a (&'a self, &myarg: i32) {} }",
+            "impl z { fn a (self, &myarg: i32) {} }",
+            "impl z { fn a (self: Foo, &myarg: i32) {} }",
+        ];
+
+        for src in srcs {
+            let spans = get_spans_of_pat_idents(src);
+            let (lo, hi) = (spans[0].lo(), spans[0].hi());
+            assert!(
+                "self" == &src[lo.to_usize()..hi.to_usize()],
+                "\"{}\" != \"self\". src=\"{}\"",
+                &src[lo.to_usize()..hi.to_usize()],
+                src
+            )
+        }
+    })
+}
+
+#[test]
+fn parse_exprs() {
+    create_default_session_globals_then(|| {
+        // Just make sure that these expressions parse.
+        string_to_expr("3 + 4".to_string());
+        string_to_expr("a::z.froob(b,&(987+3))".to_string());
+    })
+}
+
+#[test]
+fn attrs_fix_bug() {
+    create_default_session_globals_then(|| {
+        string_to_item(
+            "pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+                -> Result<Box<Writer>, String> {
+#[cfg(windows)]
+fn wb() -> c_int {
+    (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
+}
+
+#[cfg(unix)]
+fn wb() -> c_int { O_WRONLY as c_int }
+
+let mut fflags: c_int = wb();
+}"
+            .to_string(),
+        );
+    })
+}
+
+#[test]
+fn crlf_doc_comments() {
+    create_default_session_globals_then(|| {
+        let psess = psess();
+
+        let name_1 = FileName::Custom("crlf_source_1".to_string());
+        let source = "/// doc comment\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_1, source, &psess).unwrap().unwrap();
+        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
+        assert_eq!(doc.as_str(), " doc comment");
+
+        let name_2 = FileName::Custom("crlf_source_2".to_string());
+        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_2, source, &psess).unwrap().unwrap();
+        let docs = item.attrs.iter().filter_map(|at| at.doc_str()).collect::<Vec<_>>();
+        let b: &[_] = &[Symbol::intern(" doc comment"), Symbol::intern(" line 2")];
+        assert_eq!(&docs[..], b);
+
+        let name_3 = FileName::Custom("clrf_source_3".to_string());
+        let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_3, source, &psess).unwrap().unwrap();
+        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
+        assert_eq!(doc.as_str(), " doc comment\n *  with CRLF ");
+    });
+}
+
+#[test]
+fn ttdelim_span() {
+    fn parse_expr_from_source_str(
+        name: FileName,
+        source: String,
+        psess: &ParseSess,
+    ) -> PResult<'_, P<ast::Expr>> {
+        unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, source)).parse_expr()
+    }
+
+    create_default_session_globals_then(|| {
+        let psess = psess();
+        let expr = parse_expr_from_source_str(
+            PathBuf::from("foo").into(),
+            "foo!( fn main() { body } )".to_string(),
+            &psess,
+        )
+        .unwrap();
+
+        let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
+        let span = mac.args.tokens.iter().last().unwrap().span();
+
+        match psess.source_map().span_to_snippet(span) {
+            Ok(s) => assert_eq!(&s[..], "{ body }"),
+            Err(_) => panic!("could not get snippet"),
+        }
+    });
+}
+
+#[track_caller]
+fn look(p: &Parser<'_>, dist: usize, kind: rustc_ast::token::TokenKind) {
+    // Do the `assert_eq` outside the closure so that `track_caller` works.
+    // (`#![feature(closure_track_caller)]` + `#[track_caller]` on the closure
+    // doesn't give the line number in the test below if the assertion fails.)
+    let tok = p.look_ahead(dist, |tok| *tok);
+    assert_eq!(kind, tok.kind);
+}
+
+#[test]
+fn look_ahead() {
+    create_default_session_globals_then(|| {
+        let sym_f = Symbol::intern("f");
+        let sym_x = Symbol::intern("x");
+        #[allow(non_snake_case)]
+        let sym_S = Symbol::intern("S");
+        let raw_no = IdentIsRaw::No;
+
+        let psess = psess();
+        let mut p = string_to_parser(&psess, "fn f(x: u32) { x } struct S;".to_string());
+
+        // Current position is the `fn`.
+        look(&p, 0, token::Ident(kw::Fn, raw_no));
+        look(&p, 1, token::Ident(sym_f, raw_no));
+        look(&p, 2, token::OpenParen);
+        look(&p, 3, token::Ident(sym_x, raw_no));
+        look(&p, 4, token::Colon);
+        look(&p, 5, token::Ident(sym::u32, raw_no));
+        look(&p, 6, token::CloseParen);
+        look(&p, 7, token::OpenBrace);
+        look(&p, 8, token::Ident(sym_x, raw_no));
+        look(&p, 9, token::CloseBrace);
+        look(&p, 10, token::Ident(kw::Struct, raw_no));
+        look(&p, 11, token::Ident(sym_S, raw_no));
+        look(&p, 12, token::Semi);
+        // Any lookahead past the end of the token stream returns `Eof`.
+        look(&p, 13, token::Eof);
+        look(&p, 14, token::Eof);
+        look(&p, 15, token::Eof);
+        look(&p, 100, token::Eof);
+
+        // Move forward to the first `x`.
+        for _ in 0..3 {
+            p.bump();
+        }
+        look(&p, 0, token::Ident(sym_x, raw_no));
+        look(&p, 1, token::Colon);
+        look(&p, 2, token::Ident(sym::u32, raw_no));
+        look(&p, 3, token::CloseParen);
+        look(&p, 4, token::OpenBrace);
+        look(&p, 5, token::Ident(sym_x, raw_no));
+        look(&p, 6, token::CloseBrace);
+        look(&p, 7, token::Ident(kw::Struct, raw_no));
+        look(&p, 8, token::Ident(sym_S, raw_no));
+        look(&p, 9, token::Semi);
+        look(&p, 10, token::Eof);
+        look(&p, 11, token::Eof);
+        look(&p, 100, token::Eof);
+
+        // Move forward to the `;`.
+        for _ in 0..9 {
+            p.bump();
+        }
+        look(&p, 0, token::Semi);
+        // Any lookahead past the end of the token stream returns `Eof`.
+        look(&p, 1, token::Eof);
+        look(&p, 100, token::Eof);
+
+        // Move one past the `;`, i.e. past the end of the token stream.
+        p.bump();
+        look(&p, 0, token::Eof);
+        look(&p, 1, token::Eof);
+        look(&p, 100, token::Eof);
+
+        // Bumping after Eof is idempotent.
+        p.bump();
+        look(&p, 0, token::Eof);
+        look(&p, 1, token::Eof);
+        look(&p, 100, token::Eof);
+    });
+}
+
+/// There used to be some buggy behaviour when using `look_ahead` not within
+/// the outermost token stream, which this test covers.
+#[test]
+fn look_ahead_non_outermost_stream() {
+    create_default_session_globals_then(|| {
+        let sym_f = Symbol::intern("f");
+        let sym_x = Symbol::intern("x");
+        #[allow(non_snake_case)]
+        let sym_S = Symbol::intern("S");
+        let raw_no = IdentIsRaw::No;
+
+        let psess = psess();
+        let mut p = string_to_parser(&psess, "mod m { fn f(x: u32) { x } struct S; }".to_string());
+
+        // Move forward to the `fn`, which is not within the outermost token
+        // stream (because it's inside the `mod { ... }`).
+        for _ in 0..3 {
+            p.bump();
+        }
+        look(&p, 0, token::Ident(kw::Fn, raw_no));
+        look(&p, 1, token::Ident(sym_f, raw_no));
+        look(&p, 2, token::OpenParen);
+        look(&p, 3, token::Ident(sym_x, raw_no));
+        look(&p, 4, token::Colon);
+        look(&p, 5, token::Ident(sym::u32, raw_no));
+        look(&p, 6, token::CloseParen);
+        look(&p, 7, token::OpenBrace);
+        look(&p, 8, token::Ident(sym_x, raw_no));
+        look(&p, 9, token::CloseBrace);
+        look(&p, 10, token::Ident(kw::Struct, raw_no));
+        look(&p, 11, token::Ident(sym_S, raw_no));
+        look(&p, 12, token::Semi);
+        look(&p, 13, token::CloseBrace);
+        // Any lookahead past the end of the token stream returns `Eof`.
+        look(&p, 14, token::Eof);
+        look(&p, 15, token::Eof);
+        look(&p, 100, token::Eof);
+    });
+}
+
+// FIXME(nnethercote) All the output is currently wrong.
+#[test]
+fn debug_lookahead() {
+    create_default_session_globals_then(|| {
+        let psess = psess();
+        let mut p = string_to_parser(&psess, "fn f(x: u32) { x } struct S;".to_string());
+
+        // Current position is the `fn`.
+        assert_eq!(
+            &format!("{:#?}", p.debug_lookahead(0)),
+            "Parser {
+    prev_token: Token {
+        kind: Question,
+        span: Span {
+            lo: BytePos(
+                0,
+            ),
+            hi: BytePos(
+                0,
+            ),
+            ctxt: #0,
+        },
+    },
+    tokens: [],
+    approx_token_stream_pos: 0,
+    ..
+}"
+        );
+        assert_eq!(
+            &format!("{:#?}", p.debug_lookahead(7)),
+            "Parser {
+    prev_token: Token {
+        kind: Question,
+        span: Span {
+            lo: BytePos(
+                0,
+            ),
+            hi: BytePos(
+                0,
+            ),
+            ctxt: #0,
+        },
+    },
+    tokens: [
+        Ident(
+            \"fn\",
+            No,
+        ),
+        Ident(
+            \"f\",
+            No,
+        ),
+        OpenParen,
+        Ident(
+            \"x\",
+            No,
+        ),
+        Colon,
+        Ident(
+            \"u32\",
+            No,
+        ),
+        CloseParen,
+    ],
+    approx_token_stream_pos: 0,
+    ..
+}"
+        );
+        // There are 13 tokens. We request 15, get 14; the last one is `Eof`.
+        assert_eq!(
+            &format!("{:#?}", p.debug_lookahead(15)),
+            "Parser {
+    prev_token: Token {
+        kind: Question,
+        span: Span {
+            lo: BytePos(
+                0,
+            ),
+            hi: BytePos(
+                0,
+            ),
+            ctxt: #0,
+        },
+    },
+    tokens: [
+        Ident(
+            \"fn\",
+            No,
+        ),
+        Ident(
+            \"f\",
+            No,
+        ),
+        OpenParen,
+        Ident(
+            \"x\",
+            No,
+        ),
+        Colon,
+        Ident(
+            \"u32\",
+            No,
+        ),
+        CloseParen,
+        OpenBrace,
+        Ident(
+            \"x\",
+            No,
+        ),
+        CloseBrace,
+        Ident(
+            \"struct\",
+            No,
+        ),
+        Ident(
+            \"S\",
+            No,
+        ),
+        Semi,
+        Eof,
+    ],
+    approx_token_stream_pos: 0,
+    ..
+}"
+        );
+
+        // Move forward to the second `x`.
+        for _ in 0..8 {
+            p.bump();
+        }
+        assert_eq!(
+            &format!("{:#?}", p.debug_lookahead(1)),
+            "Parser {
+    prev_token: Token {
+        kind: OpenBrace,
+        span: Span {
+            lo: BytePos(
+                13,
+            ),
+            hi: BytePos(
+                14,
+            ),
+            ctxt: #0,
+        },
+    },
+    tokens: [
+        Ident(
+            \"x\",
+            No,
+        ),
+    ],
+    approx_token_stream_pos: 8,
+    ..
+}"
+        );
+        assert_eq!(
+            &format!("{:#?}", p.debug_lookahead(4)),
+            "Parser {
+    prev_token: Token {
+        kind: OpenBrace,
+        span: Span {
+            lo: BytePos(
+                13,
+            ),
+            hi: BytePos(
+                14,
+            ),
+            ctxt: #0,
+        },
+    },
+    tokens: [
+        Ident(
+            \"x\",
+            No,
+        ),
+        CloseBrace,
+        Ident(
+            \"struct\",
+            No,
+        ),
+        Ident(
+            \"S\",
+            No,
+        ),
+    ],
+    approx_token_stream_pos: 8,
+    ..
+}"
+        );
+
+        // Move two past the final token (the `;`).
+        for _ in 0..6 {
+            p.bump();
+        }
+        assert_eq!(
+            &format!("{:#?}", p.debug_lookahead(3)),
+            "Parser {
+    prev_token: Token {
+        kind: Eof,
+        span: Span {
+            lo: BytePos(
+                27,
+            ),
+            hi: BytePos(
+                28,
+            ),
+            ctxt: #0,
+        },
+    },
+    tokens: [
+        Eof,
+    ],
+    approx_token_stream_pos: 14,
+    ..
+}"
+        );
+    });
+}
+
+// This tests that when parsing a string (rather than a file) we don't try
+// to read in a file for a module declaration and instead just parse a stub.
+// See `recurse_into_file_modules` in the parser.
+#[test]
+fn out_of_line_mod() {
+    create_default_session_globals_then(|| {
+        let item = parse_item_from_source_str(
+            PathBuf::from("foo").into(),
+            "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
+            &psess(),
+        )
+        .unwrap()
+        .unwrap();
+
+        let ast::ItemKind::Mod(_, _, mod_kind) = &item.kind else { panic!() };
+        assert_matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2);
+    });
+}
+
+#[test]
+fn eqmodws() {
+    assert_eq!(matches_codepattern("", ""), true);
+    assert_eq!(matches_codepattern("", "a"), false);
+    assert_eq!(matches_codepattern("a", ""), false);
+    assert_eq!(matches_codepattern("a", "a"), true);
+    assert_eq!(matches_codepattern("a b", "a   \n\t\r  b"), true);
+    assert_eq!(matches_codepattern("a b ", "a   \n\t\r  b"), true);
+    assert_eq!(matches_codepattern("a b", "a   \n\t\r  b "), false);
+    assert_eq!(matches_codepattern("a   b", "a b"), true);
+    assert_eq!(matches_codepattern("ab", "a b"), false);
+    assert_eq!(matches_codepattern("a   b", "ab"), true);
+    assert_eq!(matches_codepattern(" a   b", "ab"), true);
+}
+
+#[test]
+fn pattern_whitespace() {
+    assert_eq!(matches_codepattern("", "\x0C"), false);
+    assert_eq!(matches_codepattern("a b ", "a   \u{0085}\n\t\r  b"), true);
+    assert_eq!(matches_codepattern("a b", "a   \u{0085}\n\t\r  b "), false);
+}
+
+#[test]
+fn non_pattern_whitespace() {
+    // These have the property 'White_Space' but not 'Pattern_White_Space'
+    assert_eq!(matches_codepattern("a b", "a\u{2002}b"), false);
+    assert_eq!(matches_codepattern("a   b", "a\u{2002}b"), false);
+    assert_eq!(matches_codepattern("\u{205F}a   b", "ab"), false);
+    assert_eq!(matches_codepattern("a  \u{3000}b", "ab"), false);
+}
diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs
new file mode 100644
index 00000000000..b91548196a3
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/token_type.rs
@@ -0,0 +1,624 @@
+use rustc_ast::token::TokenKind;
+use rustc_span::symbol::{Symbol, kw, sym};
+
+/// Used in "expected"/"expected one of" error messages. Tokens are added here
+/// as necessary. Tokens with values (e.g. literals, identifiers) are
+/// represented by a single variant (e.g. `Literal`, `Ident`).
+///
+/// It's an awkward representation, but it's important for performance. It's a
+/// C-style parameterless enum so that `TokenTypeSet` can be a bitset. This is
+/// important because `Parser::expected_token_types` is very hot. `TokenType`
+/// used to have variants with parameters (e.g. all the keywords were in a
+/// single `Keyword` variant with a `Symbol` parameter) and
+/// `Parser::expected_token_types` was a `Vec<TokenType>` which was much slower
+/// to manipulate.
+///
+/// We really want to keep the number of variants to 128 or fewer, so that
+/// `TokenTypeSet` can be implemented with a `u128`.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum TokenType {
+    // Expression-operator symbols
+    Eq,
+    Lt,
+    Le,
+    EqEq,
+    Gt,
+    AndAnd,
+    OrOr,
+    Bang,
+    Tilde,
+
+    // BinOps
+    Plus,
+    Minus,
+    Star,
+    And,
+    Or,
+
+    // Structural symbols
+    At,
+    Dot,
+    DotDot,
+    DotDotDot,
+    DotDotEq,
+    Comma,
+    Semi,
+    Colon,
+    PathSep,
+    RArrow,
+    FatArrow,
+    Pound,
+    Question,
+    OpenParen,
+    CloseParen,
+    OpenBrace,
+    CloseBrace,
+    OpenBracket,
+    CloseBracket,
+    Eof,
+
+    // Token types with some details elided.
+    /// Any operator.
+    Operator,
+    /// Any identifier token.
+    Ident,
+    /// Any lifetime token.
+    Lifetime,
+    /// Any token that can start a path.
+    Path,
+    /// Any token that can start a type.
+    Type,
+    /// Any token that can start a const expression.
+    Const,
+
+    // Keywords
+    // tidy-alphabetical-start
+    KwAs,
+    KwAsync,
+    KwAuto,
+    KwAwait,
+    KwBecome,
+    KwBox,
+    KwBreak,
+    KwCatch,
+    KwConst,
+    KwContinue,
+    KwContractEnsures,
+    KwContractRequires,
+    KwCrate,
+    KwDefault,
+    KwDyn,
+    KwElse,
+    KwEnum,
+    KwExtern,
+    KwFn,
+    KwFor,
+    KwGen,
+    KwIf,
+    KwImpl,
+    KwIn,
+    KwLet,
+    KwLoop,
+    KwMacro,
+    KwMacroRules,
+    KwMatch,
+    KwMod,
+    KwMove,
+    KwMut,
+    KwPub,
+    KwRaw,
+    KwRef,
+    KwReturn,
+    KwReuse,
+    KwSafe,
+    KwSelfUpper,
+    KwStatic,
+    KwStruct,
+    KwSuper,
+    KwTrait,
+    KwTry,
+    KwType,
+    KwUnderscore,
+    KwUnsafe,
+    KwUse,
+    KwWhere,
+    KwWhile,
+    KwYield,
+    // tidy-alphabetical-end
+
+    // Keyword-like symbols.
+    // tidy-alphabetical-start
+    SymAttSyntax,
+    SymClobberAbi,
+    SymInlateout,
+    SymInout,
+    SymIs,
+    SymLabel,
+    SymLateout,
+    SymMayUnwind,
+    SymNomem,
+    SymNoreturn,
+    SymNostack,
+    SymOptions,
+    SymOut,
+    SymPreservesFlags,
+    SymPure,
+    SymReadonly,
+    SymSym,
+    // tidy-alphabetical-end
+}
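+
+// Illustrative note: because the variants are parameterless, each one gets a
+// small integer discriminant starting at 0 (e.g. `TokenType::Eq as u32 == 0`),
+// which is what lets `TokenTypeSet` below pack the whole "expected tokens"
+// set into a single `u128`, one bit per variant. A minimal sketch of that
+// mapping:
+//
+//     let bit = 1u128 << (TokenType::Comma as u32); // the bit for `Comma`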
+
+// Macro to avoid repetitive boilerplate code.
+macro_rules! from_u32_match {
+    ($val:ident; $($tok:ident,)+) => {
+        // A more obvious formulation would be `0 => TokenType::Eq`. But
+        // this formulation with the guard lets us avoid specifying a
+        // specific integer for each variant.
+        match $val {
+            $(
+                t if t == TokenType::$tok as u32 => TokenType::$tok,
+            )+
+            _ => panic!("unhandled value: {}", $val),
+        }
+    };
+}
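+
+// For example (an illustrative expansion, not literal compiler output):
+// `from_u32_match! { val; Eq, Lt, }` becomes roughly
+//
+//     match val {
+//         t if t == TokenType::Eq as u32 => TokenType::Eq,
+//         t if t == TokenType::Lt as u32 => TokenType::Lt,
+//         _ => panic!("unhandled value: {}", val),
+//     }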
+
+impl TokenType {
+    fn from_u32(val: u32) -> TokenType {
+        let token_type = from_u32_match! { val;
+            Eq,
+            Lt,
+            Le,
+            EqEq,
+            Gt,
+            AndAnd,
+            OrOr,
+            Bang,
+            Tilde,
+
+            Plus,
+            Minus,
+            Star,
+            And,
+            Or,
+
+            At,
+            Dot,
+            DotDot,
+            DotDotDot,
+            DotDotEq,
+            Comma,
+            Semi,
+            Colon,
+            PathSep,
+            RArrow,
+            FatArrow,
+            Pound,
+            Question,
+            OpenParen,
+            CloseParen,
+            OpenBrace,
+            CloseBrace,
+            OpenBracket,
+            CloseBracket,
+            Eof,
+
+            Operator,
+            Ident,
+            Lifetime,
+            Path,
+            Type,
+            Const,
+
+            KwAs,
+            KwAsync,
+            KwAuto,
+            KwAwait,
+            KwBecome,
+            KwBox,
+            KwBreak,
+            KwCatch,
+            KwConst,
+            KwContinue,
+            KwContractEnsures,
+            KwContractRequires,
+            KwCrate,
+            KwDefault,
+            KwDyn,
+            KwElse,
+            KwEnum,
+            KwExtern,
+            KwFn,
+            KwFor,
+            KwGen,
+            KwIf,
+            KwImpl,
+            KwIn,
+            KwLet,
+            KwLoop,
+            KwMacro,
+            KwMacroRules,
+            KwMatch,
+            KwMod,
+            KwMove,
+            KwMut,
+            KwPub,
+            KwRaw,
+            KwRef,
+            KwReturn,
+            KwReuse,
+            KwSafe,
+            KwSelfUpper,
+            KwStatic,
+            KwStruct,
+            KwSuper,
+            KwTrait,
+            KwTry,
+            KwType,
+            KwUnderscore,
+            KwUnsafe,
+            KwUse,
+            KwWhere,
+            KwWhile,
+            KwYield,
+
+            SymAttSyntax,
+            SymClobberAbi,
+            SymInlateout,
+            SymInout,
+            SymIs,
+            SymLabel,
+            SymLateout,
+            SymMayUnwind,
+            SymNomem,
+            SymNoreturn,
+            SymNostack,
+            SymOptions,
+            SymOut,
+            SymPreservesFlags,
+            SymPure,
+            SymReadonly,
+            SymSym,
+        };
+        token_type
+    }
+
+    pub(super) fn is_keyword(&self) -> Option<Symbol> {
+        match self {
+            TokenType::KwAs => Some(kw::As),
+            TokenType::KwAsync => Some(kw::Async),
+            TokenType::KwAuto => Some(kw::Auto),
+            TokenType::KwAwait => Some(kw::Await),
+            TokenType::KwBecome => Some(kw::Become),
+            TokenType::KwBox => Some(kw::Box),
+            TokenType::KwBreak => Some(kw::Break),
+            TokenType::KwCatch => Some(kw::Catch),
+            TokenType::KwConst => Some(kw::Const),
+            TokenType::KwContinue => Some(kw::Continue),
+            TokenType::KwContractEnsures => Some(kw::ContractEnsures),
+            TokenType::KwContractRequires => Some(kw::ContractRequires),
+            TokenType::KwCrate => Some(kw::Crate),
+            TokenType::KwDefault => Some(kw::Default),
+            TokenType::KwDyn => Some(kw::Dyn),
+            TokenType::KwElse => Some(kw::Else),
+            TokenType::KwEnum => Some(kw::Enum),
+            TokenType::KwExtern => Some(kw::Extern),
+            TokenType::KwFn => Some(kw::Fn),
+            TokenType::KwFor => Some(kw::For),
+            TokenType::KwGen => Some(kw::Gen),
+            TokenType::KwIf => Some(kw::If),
+            TokenType::KwImpl => Some(kw::Impl),
+            TokenType::KwIn => Some(kw::In),
+            TokenType::KwLet => Some(kw::Let),
+            TokenType::KwLoop => Some(kw::Loop),
+            TokenType::KwMacroRules => Some(kw::MacroRules),
+            TokenType::KwMacro => Some(kw::Macro),
+            TokenType::KwMatch => Some(kw::Match),
+            TokenType::KwMod => Some(kw::Mod),
+            TokenType::KwMove => Some(kw::Move),
+            TokenType::KwMut => Some(kw::Mut),
+            TokenType::KwPub => Some(kw::Pub),
+            TokenType::KwRaw => Some(kw::Raw),
+            TokenType::KwRef => Some(kw::Ref),
+            TokenType::KwReturn => Some(kw::Return),
+            TokenType::KwReuse => Some(kw::Reuse),
+            TokenType::KwSafe => Some(kw::Safe),
+            TokenType::KwSelfUpper => Some(kw::SelfUpper),
+            TokenType::KwStatic => Some(kw::Static),
+            TokenType::KwStruct => Some(kw::Struct),
+            TokenType::KwSuper => Some(kw::Super),
+            TokenType::KwTrait => Some(kw::Trait),
+            TokenType::KwTry => Some(kw::Try),
+            TokenType::KwType => Some(kw::Type),
+            TokenType::KwUnderscore => Some(kw::Underscore),
+            TokenType::KwUnsafe => Some(kw::Unsafe),
+            TokenType::KwUse => Some(kw::Use),
+            TokenType::KwWhere => Some(kw::Where),
+            TokenType::KwWhile => Some(kw::While),
+            TokenType::KwYield => Some(kw::Yield),
+
+            TokenType::SymAttSyntax => Some(sym::att_syntax),
+            TokenType::SymClobberAbi => Some(sym::clobber_abi),
+            TokenType::SymInlateout => Some(sym::inlateout),
+            TokenType::SymInout => Some(sym::inout),
+            TokenType::SymIs => Some(sym::is),
+            TokenType::SymLabel => Some(sym::label),
+            TokenType::SymLateout => Some(sym::lateout),
+            TokenType::SymMayUnwind => Some(sym::may_unwind),
+            TokenType::SymNomem => Some(sym::nomem),
+            TokenType::SymNoreturn => Some(sym::noreturn),
+            TokenType::SymNostack => Some(sym::nostack),
+            TokenType::SymOptions => Some(sym::options),
+            TokenType::SymOut => Some(sym::out),
+            TokenType::SymPreservesFlags => Some(sym::preserves_flags),
+            TokenType::SymPure => Some(sym::pure),
+            TokenType::SymReadonly => Some(sym::readonly),
+            TokenType::SymSym => Some(sym::sym),
+            _ => None,
+        }
+    }
+
+    // The output should be the same as that produced by
+    // `rustc_ast_pretty::pprust::token_to_string`.
+    pub(super) fn to_string(&self) -> String {
+        match self {
+            TokenType::Eq => "`=`",
+            TokenType::Lt => "`<`",
+            TokenType::Le => "`<=`",
+            TokenType::EqEq => "`==`",
+            TokenType::Gt => "`>`",
+            TokenType::AndAnd => "`&&`",
+            TokenType::OrOr => "`||`",
+            TokenType::Bang => "`!`",
+            TokenType::Tilde => "`~`",
+
+            TokenType::Plus => "`+`",
+            TokenType::Minus => "`-`",
+            TokenType::Star => "`*`",
+            TokenType::And => "`&`",
+            TokenType::Or => "`|`",
+
+            TokenType::At => "`@`",
+            TokenType::Dot => "`.`",
+            TokenType::DotDot => "`..`",
+            TokenType::DotDotDot => "`...`",
+            TokenType::DotDotEq => "`..=`",
+            TokenType::Comma => "`,`",
+            TokenType::Semi => "`;`",
+            TokenType::Colon => "`:`",
+            TokenType::PathSep => "`::`",
+            TokenType::RArrow => "`->`",
+            TokenType::FatArrow => "`=>`",
+            TokenType::Pound => "`#`",
+            TokenType::Question => "`?`",
+            TokenType::OpenParen => "`(`",
+            TokenType::CloseParen => "`)`",
+            TokenType::OpenBrace => "`{`",
+            TokenType::CloseBrace => "`}`",
+            TokenType::OpenBracket => "`[`",
+            TokenType::CloseBracket => "`]`",
+            TokenType::Eof => "<eof>",
+
+            TokenType::Operator => "an operator",
+            TokenType::Ident => "identifier",
+            TokenType::Lifetime => "lifetime",
+            TokenType::Path => "path",
+            TokenType::Type => "type",
+            TokenType::Const => "a const expression",
+
+            _ => return format!("`{}`", self.is_keyword().unwrap()),
+        }
+        .to_string()
+    }
+}
+
+/// Used by various `Parser` methods such as `check` and `eat`. The first field
+/// is always used by those methods. The second field is only used when the
+/// first field doesn't match.
+#[derive(Clone, Copy, Debug)]
+pub struct ExpTokenPair<'a> {
+    pub tok: &'a TokenKind,
+    pub token_type: TokenType,
+}
+
+/// Used by various `Parser` methods such as `check_keyword` and `eat_keyword`.
+/// The first field is always used by those methods. The second field is only
+/// used when the first field doesn't match.
+#[derive(Clone, Copy)]
+pub struct ExpKeywordPair {
+    pub kw: Symbol,
+    pub token_type: TokenType,
+}
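+
+// For instance, `exp!(Fn)` below expands to
+// `ExpKeywordPair { kw: kw::Fn, token_type: TokenType::KwFn }`, and
+// `exp!(Comma)` to an `ExpTokenPair` holding `&token::Comma` and
+// `TokenType::Comma`.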
+
+// Gets a statically-known `ExpTokenPair` (for non-keywords) or
+// `ExpKeywordPair` (for keywords), as used with various `check`/`expect`
+// methods in `Parser`.
+//
+// The name is short because it's used a lot.
+#[macro_export]
+// We don't use the normal `#[rustfmt::skip]` here because that triggers a
+// bogus "macro-expanded `macro_export` macros from the current crate cannot be
+// referred to by absolute paths" error, ugh. See #52234.
+#[cfg_attr(rustfmt, rustfmt::skip)]
+macro_rules! exp {
+    // `ExpTokenPair` helper rules.
+    (@tok, $tok:ident) => {
+        $crate::parser::token_type::ExpTokenPair {
+            tok: &rustc_ast::token::$tok,
+            token_type: $crate::parser::token_type::TokenType::$tok
+        }
+    };
+
+    // `ExpKeywordPair` helper rules.
+    (@kw, $kw:ident, $token_type:ident) => {
+        $crate::parser::token_type::ExpKeywordPair {
+            kw: rustc_span::symbol::kw::$kw,
+            token_type: $crate::parser::token_type::TokenType::$token_type,
+        }
+    };
+    (@sym, $kw:ident, $token_type:ident) => {
+        $crate::parser::token_type::ExpKeywordPair {
+            kw: rustc_span::symbol::sym::$kw,
+            token_type: $crate::parser::token_type::TokenType::$token_type,
+        }
+    };
+
+    (Eq)             => { exp!(@tok, Eq) };
+    (Lt)             => { exp!(@tok, Lt) };
+    (Le)             => { exp!(@tok, Le) };
+    (EqEq)           => { exp!(@tok, EqEq) };
+    (Gt)             => { exp!(@tok, Gt) };
+    (AndAnd)         => { exp!(@tok, AndAnd) };
+    (OrOr)           => { exp!(@tok, OrOr) };
+    (Bang)           => { exp!(@tok, Bang) };
+    (Tilde)          => { exp!(@tok, Tilde) };
+    (Plus)           => { exp!(@tok, Plus) };
+    (Minus)          => { exp!(@tok, Minus) };
+    (Star)           => { exp!(@tok, Star) };
+    (And)            => { exp!(@tok, And) };
+    (Or)             => { exp!(@tok, Or) };
+    (At)             => { exp!(@tok, At) };
+    (Dot)            => { exp!(@tok, Dot) };
+    (DotDot)         => { exp!(@tok, DotDot) };
+    (DotDotDot)      => { exp!(@tok, DotDotDot) };
+    (DotDotEq)       => { exp!(@tok, DotDotEq) };
+    (Comma)          => { exp!(@tok, Comma) };
+    (Semi)           => { exp!(@tok, Semi) };
+    (Colon)          => { exp!(@tok, Colon) };
+    (PathSep)        => { exp!(@tok, PathSep) };
+    (RArrow)         => { exp!(@tok, RArrow) };
+    (FatArrow)       => { exp!(@tok, FatArrow) };
+    (Pound)          => { exp!(@tok, Pound) };
+    (Question)       => { exp!(@tok, Question) };
+    (Eof)            => { exp!(@tok, Eof) };
+
+    (OpenParen)      => { exp!(@tok, OpenParen) };
+    (OpenBrace)      => { exp!(@tok, OpenBrace) };
+    (OpenBracket)    => { exp!(@tok, OpenBracket) };
+    (CloseParen)     => { exp!(@tok, CloseParen) };
+    (CloseBrace)     => { exp!(@tok, CloseBrace) };
+    (CloseBracket)   => { exp!(@tok, CloseBracket) };
+
+    (As)             => { exp!(@kw, As,         KwAs) };
+    (Async)          => { exp!(@kw, Async,      KwAsync) };
+    (Auto)           => { exp!(@kw, Auto,       KwAuto) };
+    (Await)          => { exp!(@kw, Await,      KwAwait) };
+    (Become)         => { exp!(@kw, Become,     KwBecome) };
+    (Box)            => { exp!(@kw, Box,        KwBox) };
+    (Break)          => { exp!(@kw, Break,      KwBreak) };
+    (Catch)          => { exp!(@kw, Catch,      KwCatch) };
+    (Const)          => { exp!(@kw, Const,      KwConst) };
+    (Continue)       => { exp!(@kw, Continue,   KwContinue) };
+    (ContractEnsures)  => { exp!(@kw, ContractEnsures, KwContractEnsures) };
+    (ContractRequires) => { exp!(@kw, ContractRequires, KwContractRequires) };
+    (Crate)          => { exp!(@kw, Crate,      KwCrate) };
+    (Default)        => { exp!(@kw, Default,    KwDefault) };
+    (Dyn)            => { exp!(@kw, Dyn,        KwDyn) };
+    (Else)           => { exp!(@kw, Else,       KwElse) };
+    (Enum)           => { exp!(@kw, Enum,       KwEnum) };
+    (Extern)         => { exp!(@kw, Extern,     KwExtern) };
+    (Fn)             => { exp!(@kw, Fn,         KwFn) };
+    (For)            => { exp!(@kw, For,        KwFor) };
+    (Gen)            => { exp!(@kw, Gen,        KwGen) };
+    (If)             => { exp!(@kw, If,         KwIf) };
+    (Impl)           => { exp!(@kw, Impl,       KwImpl) };
+    (In)             => { exp!(@kw, In,         KwIn) };
+    (Let)            => { exp!(@kw, Let,        KwLet) };
+    (Loop)           => { exp!(@kw, Loop,       KwLoop) };
+    (Macro)          => { exp!(@kw, Macro,      KwMacro) };
+    (MacroRules)     => { exp!(@kw, MacroRules, KwMacroRules) };
+    (Match)          => { exp!(@kw, Match,      KwMatch) };
+    (Mod)            => { exp!(@kw, Mod,        KwMod) };
+    (Move)           => { exp!(@kw, Move,       KwMove) };
+    (Mut)            => { exp!(@kw, Mut,        KwMut) };
+    (Pub)            => { exp!(@kw, Pub,        KwPub) };
+    (Raw)            => { exp!(@kw, Raw,        KwRaw) };
+    (Ref)            => { exp!(@kw, Ref,        KwRef) };
+    (Return)         => { exp!(@kw, Return,     KwReturn) };
+    (Reuse)          => { exp!(@kw, Reuse,      KwReuse) };
+    (Safe)           => { exp!(@kw, Safe,       KwSafe) };
+    (SelfUpper)      => { exp!(@kw, SelfUpper,  KwSelfUpper) };
+    (Static)         => { exp!(@kw, Static,     KwStatic) };
+    (Struct)         => { exp!(@kw, Struct,     KwStruct) };
+    (Super)          => { exp!(@kw, Super,      KwSuper) };
+    (Trait)          => { exp!(@kw, Trait,      KwTrait) };
+    (Try)            => { exp!(@kw, Try,        KwTry) };
+    (Type)           => { exp!(@kw, Type,       KwType) };
+    (Underscore)     => { exp!(@kw, Underscore, KwUnderscore) };
+    (Unsafe)         => { exp!(@kw, Unsafe,     KwUnsafe) };
+    (Use)            => { exp!(@kw, Use,        KwUse) };
+    (Where)          => { exp!(@kw, Where,      KwWhere) };
+    (While)          => { exp!(@kw, While,      KwWhile) };
+    (Yield)          => { exp!(@kw, Yield,      KwYield) };
+
+    (AttSyntax)      => { exp!(@sym, att_syntax,      SymAttSyntax) };
+    (ClobberAbi)     => { exp!(@sym, clobber_abi,     SymClobberAbi) };
+    (Inlateout)      => { exp!(@sym, inlateout,       SymInlateout) };
+    (Inout)          => { exp!(@sym, inout,           SymInout) };
+    (Is)             => { exp!(@sym, is,              SymIs) };
+    (Label)          => { exp!(@sym, label,           SymLabel) };
+    (Lateout)        => { exp!(@sym, lateout,         SymLateout) };
+    (MayUnwind)      => { exp!(@sym, may_unwind,      SymMayUnwind) };
+    (Nomem)          => { exp!(@sym, nomem,           SymNomem) };
+    (Noreturn)       => { exp!(@sym, noreturn,        SymNoreturn) };
+    (Nostack)        => { exp!(@sym, nostack,         SymNostack) };
+    (Options)        => { exp!(@sym, options,         SymOptions) };
+    (Out)            => { exp!(@sym, out,             SymOut) };
+    (PreservesFlags) => { exp!(@sym, preserves_flags, SymPreservesFlags) };
+    (Pure)           => { exp!(@sym, pure,            SymPure) };
+    (Readonly)       => { exp!(@sym, readonly,        SymReadonly) };
+    (Sym)            => { exp!(@sym, sym,             SymSym) };
+}
+
+/// A bitset type designed specifically for `Parser::expected_token_types`,
+/// which is very hot. `u128` is the smallest integer that will fit every
+/// `TokenType` value.
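+///
+/// A minimal usage sketch (illustrative only; the variants shown are assumed
+/// from the `exp!` table above):
+/// ```ignore (illustrative)
+/// let mut set = TokenTypeSet::new();
+/// set.insert(TokenType::OpenParen);
+/// set.insert(TokenType::KwAs);
+/// assert!(!set.is_empty());
+/// assert!(set.contains(TokenType::OpenParen));
+/// assert_eq!(set.iter().count(), 2);
+/// ```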
+#[derive(Clone, Copy)]
+pub(super) struct TokenTypeSet(u128);
+
+impl TokenTypeSet {
+    pub(super) fn new() -> TokenTypeSet {
+        TokenTypeSet(0)
+    }
+
+    pub(super) fn is_empty(&self) -> bool {
+        self.0 == 0
+    }
+
+    pub(super) fn insert(&mut self, token_type: TokenType) {
+        self.0 = self.0 | (1u128 << token_type as u32)
+    }
+
+    pub(super) fn clear(&mut self) {
+        self.0 = 0
+    }
+
+    pub(super) fn contains(&self, token_type: TokenType) -> bool {
+        self.0 & (1u128 << token_type as u32) != 0
+    }
+
+    pub(super) fn iter(&self) -> TokenTypeSetIter {
+        TokenTypeSetIter(*self)
+    }
+}
+
+// The `TokenTypeSet` is a copy of the set being iterated. It initially holds
+// the entire set. Each bit is cleared as it is returned. We have finished once
+// it is all zeroes.
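+//
+// For example (illustrative), a set with bits 3 and 7 set yields
+// `TokenType::from_u32(3)` and then `TokenType::from_u32(7)`; each bit is
+// cleared in the iterator's copy as it is returned, leaving the original set
+// untouched.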
+pub(super) struct TokenTypeSetIter(TokenTypeSet);
+
+impl Iterator for TokenTypeSetIter {
+    type Item = TokenType;
+
+    fn next(&mut self) -> Option<TokenType> {
+        let num_bits: u32 = (size_of_val(&self.0.0) * 8) as u32;
+        assert_eq!(num_bits, 128);
+        let z = self.0.0.trailing_zeros();
+        if z == num_bits {
+            None
+        } else {
+            self.0.0 &= !(1 << z); // clear the trailing 1 bit
+            Some(TokenType::from_u32(z))
+        }
+    }
+}
diff --git a/compiler/rustc_parse/src/parser/tokenstream/tests.rs b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
new file mode 100644
index 00000000000..19b2c98f5af
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
@@ -0,0 +1,114 @@
+#![allow(rustc::symbol_intern_string_literal)]
+
+use rustc_ast::token::{self, IdentIsRaw};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_span::{BytePos, Span, Symbol, create_default_session_globals_then};
+
+use crate::parser::tests::string_to_stream;
+
+fn string_to_ts(string: &str) -> TokenStream {
+    string_to_stream(string.to_owned())
+}
+
+fn sp(a: u32, b: u32) -> Span {
+    Span::with_root_ctxt(BytePos(a), BytePos(b))
+}
+
+fn cmp_token_stream(a: &TokenStream, b: &TokenStream) -> bool {
+    a.len() == b.len() && a.iter().zip(b.iter()).all(|(x, y)| x.eq_unspanned(y))
+}
+
+#[test]
+fn test_concat() {
+    create_default_session_globals_then(|| {
+        let test_res = string_to_ts("foo::bar::baz");
+        let test_fst = string_to_ts("foo::bar");
+        let test_snd = string_to_ts("::baz");
+        let mut eq_res = TokenStream::default();
+        eq_res.push_stream(test_fst);
+        eq_res.push_stream(test_snd);
+        assert_eq!(test_res.iter().count(), 5);
+        assert_eq!(eq_res.iter().count(), 5);
+        assert!(cmp_token_stream(&test_res, &eq_res));
+    })
+}
+
+#[test]
+fn test_to_from_bijection() {
+    create_default_session_globals_then(|| {
+        let test_start = string_to_ts("foo::bar(baz)");
+        let test_end = test_start.iter().cloned().collect();
+        assert_eq!(test_start, test_end)
+    })
+}
+
+#[test]
+fn test_eq_0() {
+    create_default_session_globals_then(|| {
+        let test_res = string_to_ts("foo");
+        let test_eqs = string_to_ts("foo");
+        assert_eq!(test_res, test_eqs)
+    })
+}
+
+#[test]
+fn test_eq_1() {
+    create_default_session_globals_then(|| {
+        let test_res = string_to_ts("::bar::baz");
+        let test_eqs = string_to_ts("::bar::baz");
+        assert_eq!(test_res, test_eqs)
+    })
+}
+
+#[test]
+fn test_eq_3() {
+    create_default_session_globals_then(|| {
+        let test_res = string_to_ts("");
+        let test_eqs = string_to_ts("");
+        assert_eq!(test_res, test_eqs)
+    })
+}
+
+#[test]
+fn test_diseq_0() {
+    create_default_session_globals_then(|| {
+        let test_res = string_to_ts("::bar::baz");
+        let test_eqs = string_to_ts("bar::baz");
+        assert_ne!(test_res, test_eqs)
+    })
+}
+
+#[test]
+fn test_diseq_1() {
+    create_default_session_globals_then(|| {
+        let test_res = string_to_ts("(bar,baz)");
+        let test_eqs = string_to_ts("bar,baz");
+        assert_ne!(test_res, test_eqs)
+    })
+}
+
+#[test]
+fn test_is_empty() {
+    create_default_session_globals_then(|| {
+        let test0 = TokenStream::default();
+        let test1 =
+            TokenStream::token_alone(token::Ident(Symbol::intern("a"), IdentIsRaw::No), sp(0, 1));
+        let test2 = string_to_ts("foo(bar::baz)");
+
+        assert!(test0.is_empty());
+        assert!(!test1.is_empty());
+        assert!(!test2.is_empty());
+    })
+}
+
+#[test]
+fn test_dotdotdot() {
+    create_default_session_globals_then(|| {
+        let mut stream = TokenStream::default();
+        stream.push_tree(TokenTree::token_joint(token::Dot, sp(0, 1)));
+        stream.push_tree(TokenTree::token_joint(token::Dot, sp(1, 2)));
+        stream.push_tree(TokenTree::token_alone(token::Dot, sp(2, 3)));
+        assert!(cmp_token_stream(&stream, &string_to_ts("...")));
+        assert_eq!(stream.iter().count(), 1);
+    })
+}
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
new file mode 100644
index 00000000000..59048e42e6f
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -0,0 +1,1470 @@
+use rustc_ast::ptr::P;
+use rustc_ast::token::{self, IdentIsRaw, MetaVarKind, Token, TokenKind};
+use rustc_ast::util::case::Case;
+use rustc_ast::{
+    self as ast, BoundAsyncness, BoundConstness, BoundPolarity, DUMMY_NODE_ID, FnPtrTy, FnRetTy,
+    GenericBound, GenericBounds, GenericParam, Generics, Lifetime, MacCall, MutTy, Mutability,
+    Pinnedness, PolyTraitRef, PreciseCapturingArg, TraitBoundModifiers, TraitObjectSyntax, Ty,
+    TyKind, UnsafeBinderTy,
+};
+use rustc_data_structures::stack::ensure_sufficient_stack;
+use rustc_errors::{Applicability, Diag, PResult};
+use rustc_span::{ErrorGuaranteed, Ident, Span, kw, sym};
+use thin_vec::{ThinVec, thin_vec};
+
+use super::{Parser, PathStyle, SeqSep, TokenType, Trailing};
+use crate::errors::{
+    self, AttributeOnEmptyType, AttributeOnType, DynAfterMut, ExpectedFnPathFoundFnKeyword,
+    ExpectedMutOrConstInRawPointerType, FnPtrWithGenerics, FnPtrWithGenericsSugg,
+    HelpUseLatestEdition, InvalidDynKeyword, LifetimeAfterMut, NeedPlusAfterTraitObjectLifetime,
+    NestedCVariadicType, ReturnTypesUseThinArrow,
+};
+use crate::parser::item::FrontMatterParsingMode;
+use crate::{exp, maybe_recover_from_interpolated_ty_qpath};
+
+/// Signals whether parsing a type should allow `+`.
+///
+/// For example, let `T` be the type `impl Default + 'static`:
+/// with `AllowPlus::Yes`, `T` is parsed successfully, while
+/// with `AllowPlus::No`, parsing `T` returns a parse error.
+#[derive(Copy, Clone, PartialEq)]
+pub(super) enum AllowPlus {
+    Yes,
+    No,
+}
+
+#[derive(PartialEq)]
+pub(super) enum RecoverQPath {
+    Yes,
+    No,
+}
+
+pub(super) enum RecoverQuestionMark {
+    Yes,
+    No,
+}
+
+/// Signals whether parsing a type should recover `->`.
+///
+/// More specifically, when parsing a function like:
+/// ```compile_fail
+/// fn foo() => u8 { 0 }
+/// fn bar(): u8 { 0 }
+/// ```
+/// The compiler will try to recover interpreting `foo() => u8` as `foo() -> u8` when calling
+/// `parse_ty` with anything except `RecoverReturnSign::No`, and it will try to recover `bar(): u8`
+/// as `bar() -> u8` when passing `RecoverReturnSign::Yes` to `parse_ty`.
+#[derive(Copy, Clone, PartialEq)]
+pub(super) enum RecoverReturnSign {
+    Yes,
+    OnlyFatArrow,
+    No,
+}
+
+impl RecoverReturnSign {
+    /// [RecoverReturnSign::Yes] allows for recovering `fn foo() => u8` and `fn foo(): u8`,
+    /// [RecoverReturnSign::OnlyFatArrow] allows for recovering only `fn foo() => u8` (recovering
+    /// colons can cause problems when parsing where clauses), and
+    /// [RecoverReturnSign::No] doesn't allow for any recovery of the return type arrow.
+    fn can_recover(self, token: &TokenKind) -> bool {
+        match self {
+            Self::Yes => matches!(token, token::FatArrow | token::Colon),
+            Self::OnlyFatArrow => matches!(token, token::FatArrow),
+            Self::No => false,
+        }
+    }
+}
+
+// Is `...` (`CVarArgs`) legal at this level of type parsing?
+#[derive(PartialEq)]
+enum AllowCVariadic {
+    Yes,
+    No,
+}
+
+/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
+/// `IDENT<<u8 as Trait>::AssocTy>`.
+///
+/// Types can also be of the form `IDENT(u8, u8) -> u8`; however, this assumes
+/// that `IDENT` is not the ident of a fn trait.
+fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
+    t == &token::PathSep || t == &token::Lt || t == &token::Shl
+}
+
+fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool {
+    // `Not`, `Tilde` & `Const` are deliberately not part of this list to
+    // contain the number of potential regressions esp. in MBE code.
+    // `Const` would regress `rfc-2632-const-trait-impl/mbe-dyn-const-2015.rs`.
+    // `Not` would regress `dyn!(...)` macro calls in Rust 2015.
+    t.is_path_start()
+        || t.is_lifetime()
+        || t == &TokenKind::Question
+        || t.is_keyword(kw::For)
+        || t == &TokenKind::OpenParen
+}
+
+impl<'a> Parser<'a> {
+    /// Parses a type.
+    pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
+        // Make sure deeply nested types don't overflow the stack.
+        ensure_sufficient_stack(|| {
+            self.parse_ty_common(
+                AllowPlus::Yes,
+                AllowCVariadic::No,
+                RecoverQPath::Yes,
+                RecoverReturnSign::Yes,
+                None,
+                RecoverQuestionMark::Yes,
+            )
+        })
+    }
+
+    pub(super) fn parse_ty_with_generics_recovery(
+        &mut self,
+        ty_params: &Generics,
+    ) -> PResult<'a, P<Ty>> {
+        self.parse_ty_common(
+            AllowPlus::Yes,
+            AllowCVariadic::No,
+            RecoverQPath::Yes,
+            RecoverReturnSign::Yes,
+            Some(ty_params),
+            RecoverQuestionMark::Yes,
+        )
+    }
+
+    /// Parse a type suitable for a function or function pointer parameter.
+    /// The difference from `parse_ty` is that this version allows `...`
+    /// (`CVarArgs`) at the top level of the type.
+    pub(super) fn parse_ty_for_param(&mut self) -> PResult<'a, P<Ty>> {
+        self.parse_ty_common(
+            AllowPlus::Yes,
+            AllowCVariadic::Yes,
+            RecoverQPath::Yes,
+            RecoverReturnSign::Yes,
+            None,
+            RecoverQuestionMark::Yes,
+        )
+    }
+
+    /// Parses a type in restricted contexts where `+` is not permitted.
+    ///
+    /// Example 1: `&'a TYPE`
+    ///     `+` is prohibited to maintain operator priority (P(+) < P(&)).
+    /// Example 2: `value1 as TYPE + value2`
+    ///     `+` is prohibited to avoid interactions with expression grammar.
+    pub(super) fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
+        self.parse_ty_common(
+            AllowPlus::No,
+            AllowCVariadic::No,
+            RecoverQPath::Yes,
+            RecoverReturnSign::Yes,
+            None,
+            RecoverQuestionMark::Yes,
+        )
+    }
+
+    /// Parses a type following an `as` cast. Similar to `parse_ty_no_plus`, but signaling origin
+    /// for better diagnostics involving `?`.
+    pub(super) fn parse_as_cast_ty(&mut self) -> PResult<'a, P<Ty>> {
+        self.parse_ty_common(
+            AllowPlus::No,
+            AllowCVariadic::No,
+            RecoverQPath::Yes,
+            RecoverReturnSign::Yes,
+            None,
+            RecoverQuestionMark::No,
+        )
+    }
+
+    pub(super) fn parse_ty_no_question_mark_recover(&mut self) -> PResult<'a, P<Ty>> {
+        self.parse_ty_common(
+            AllowPlus::Yes,
+            AllowCVariadic::No,
+            RecoverQPath::Yes,
+            RecoverReturnSign::Yes,
+            None,
+            RecoverQuestionMark::No,
+        )
+    }
+
+    /// Parse a type without recovering `:` as `->` to avoid breaking code such
+    /// as `where fn() : for<'a>`.
+    pub(super) fn parse_ty_for_where_clause(&mut self) -> PResult<'a, P<Ty>> {
+        self.parse_ty_common(
+            AllowPlus::Yes,
+            AllowCVariadic::No,
+            RecoverQPath::Yes,
+            RecoverReturnSign::OnlyFatArrow,
+            None,
+            RecoverQuestionMark::Yes,
+        )
+    }
+
+    /// Parses an optional return type `[ -> TY ]` in a function declaration.
+    pub(super) fn parse_ret_ty(
+        &mut self,
+        allow_plus: AllowPlus,
+        recover_qpath: RecoverQPath,
+        recover_return_sign: RecoverReturnSign,
+    ) -> PResult<'a, FnRetTy> {
+        let lo = self.prev_token.span;
+        Ok(if self.eat(exp!(RArrow)) {
+            // FIXME(Centril): Can we unconditionally `allow_plus`?
+            let ty = self.parse_ty_common(
+                allow_plus,
+                AllowCVariadic::No,
+                recover_qpath,
+                recover_return_sign,
+                None,
+                RecoverQuestionMark::Yes,
+            )?;
+            FnRetTy::Ty(ty)
+        } else if recover_return_sign.can_recover(&self.token.kind) {
+            // Don't `eat` to prevent `=>` from being added as an expected token which isn't
+            // actually expected and could only confuse users
+            self.bump();
+            self.dcx().emit_err(ReturnTypesUseThinArrow {
+                span: self.prev_token.span,
+                suggestion: lo.between(self.token.span),
+            });
+            let ty = self.parse_ty_common(
+                allow_plus,
+                AllowCVariadic::No,
+                recover_qpath,
+                recover_return_sign,
+                None,
+                RecoverQuestionMark::Yes,
+            )?;
+            FnRetTy::Ty(ty)
+        } else {
+            FnRetTy::Default(self.prev_token.span.shrink_to_hi())
+        })
+    }
+
+    fn parse_ty_common(
+        &mut self,
+        allow_plus: AllowPlus,
+        allow_c_variadic: AllowCVariadic,
+        recover_qpath: RecoverQPath,
+        recover_return_sign: RecoverReturnSign,
+        ty_generics: Option<&Generics>,
+        recover_question_mark: RecoverQuestionMark,
+    ) -> PResult<'a, P<Ty>> {
+        let allow_qpath_recovery = recover_qpath == RecoverQPath::Yes;
+        maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
+        if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) {
+            let attrs_wrapper = self.parse_outer_attributes()?;
+            let raw_attrs = attrs_wrapper.take_for_recovery(self.psess);
+            let attr_span = raw_attrs[0].span.to(raw_attrs.last().unwrap().span);
+            let (full_span, guar) = match self.parse_ty() {
+                Ok(ty) => {
+                    let full_span = attr_span.until(ty.span);
+                    let guar = self
+                        .dcx()
+                        .emit_err(AttributeOnType { span: attr_span, fix_span: full_span });
+                    (attr_span, guar)
+                }
+                Err(err) => {
+                    err.cancel();
+                    let guar = self.dcx().emit_err(AttributeOnEmptyType { span: attr_span });
+                    (attr_span, guar)
+                }
+            };
+
+            return Ok(self.mk_ty(full_span, TyKind::Err(guar)));
+        }
+        if let Some(ty) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Ty { .. }),
+            |this| this.parse_ty_no_question_mark_recover(),
+        ) {
+            return Ok(ty);
+        }
+
+        let lo = self.token.span;
+        let mut impl_dyn_multi = false;
+        let kind = if self.check(exp!(OpenParen)) {
+            self.parse_ty_tuple_or_parens(lo, allow_plus)?
+        } else if self.eat(exp!(Bang)) {
+            // Never type `!`
+            TyKind::Never
+        } else if self.eat(exp!(Star)) {
+            self.parse_ty_ptr()?
+        } else if self.eat(exp!(OpenBracket)) {
+            self.parse_array_or_slice_ty()?
+        } else if self.check(exp!(And)) || self.check(exp!(AndAnd)) {
+            // Reference
+            self.expect_and()?;
+            self.parse_borrowed_pointee()?
+        } else if self.eat_keyword_noexpect(kw::Typeof) {
+            self.parse_typeof_ty()?
+        } else if self.eat_keyword(exp!(Underscore)) {
+            // A type to be inferred `_`
+            TyKind::Infer
+        } else if self.check_fn_front_matter(false, Case::Sensitive) {
+            // Function pointer type
+            self.parse_ty_fn_ptr(lo, ThinVec::new(), None, recover_return_sign)?
+        } else if self.check_keyword(exp!(For)) {
+            // Function pointer type or bound list (trait object type) starting with a poly-trait.
+            //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
+            //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
+            let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
+            if self.check_fn_front_matter(false, Case::Sensitive) {
+                self.parse_ty_fn_ptr(
+                    lo,
+                    lifetime_defs,
+                    Some(self.prev_token.span.shrink_to_lo()),
+                    recover_return_sign,
+                )?
+            } else {
+                // Try to recover `for<'a> dyn Trait` or `for<'a> impl Trait`.
+                if self.may_recover()
+                    && (self.eat_keyword_noexpect(kw::Impl) || self.eat_keyword_noexpect(kw::Dyn))
+                {
+                    let kw = self.prev_token.ident().unwrap().0;
+                    let removal_span = kw.span.with_hi(self.token.span.lo());
+                    let path = self.parse_path(PathStyle::Type)?;
+                    let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
+                    let kind = self.parse_remaining_bounds_path(
+                        lifetime_defs,
+                        path,
+                        lo,
+                        parse_plus,
+                        ast::Parens::No,
+                    )?;
+                    let err = self.dcx().create_err(errors::TransposeDynOrImpl {
+                        span: kw.span,
+                        kw: kw.name.as_str(),
+                        sugg: errors::TransposeDynOrImplSugg {
+                            removal_span,
+                            insertion_span: lo.shrink_to_lo(),
+                            kw: kw.name.as_str(),
+                        },
+                    });
+
+                    // Take the parsed bare trait object and turn it either
+                    // into a `dyn` object or an `impl Trait`.
+                    let kind = match (kind, kw.name) {
+                        (TyKind::TraitObject(bounds, _), kw::Dyn) => {
+                            TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
+                        }
+                        (TyKind::TraitObject(bounds, _), kw::Impl) => {
+                            TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
+                        }
+                        _ => return Err(err),
+                    };
+                    err.emit();
+                    kind
+                } else {
+                    let path = self.parse_path(PathStyle::Type)?;
+                    let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
+                    self.parse_remaining_bounds_path(
+                        lifetime_defs,
+                        path,
+                        lo,
+                        parse_plus,
+                        ast::Parens::No,
+                    )?
+                }
+            }
+        } else if self.eat_keyword(exp!(Impl)) {
+            self.parse_impl_ty(&mut impl_dyn_multi)?
+        } else if self.is_explicit_dyn_type() {
+            self.parse_dyn_ty(&mut impl_dyn_multi)?
+        } else if self.eat_lt() {
+            // Qualified path
+            let (qself, path) = self.parse_qpath(PathStyle::Type)?;
+            TyKind::Path(Some(qself), path)
+        } else if self.check_path() {
+            self.parse_path_start_ty(lo, allow_plus, ty_generics)?
+        } else if self.can_begin_bound() {
+            self.parse_bare_trait_object(lo, allow_plus)?
+        } else if self.eat(exp!(DotDotDot)) {
+            match allow_c_variadic {
+                AllowCVariadic::Yes => TyKind::CVarArgs,
+                AllowCVariadic::No => {
+                    // FIXME(c_variadic): Should we just allow `...` syntactically
+                    // anywhere in a type and use semantic restrictions instead?
+                    // NOTE: This may regress certain MBE calls if done incorrectly.
+                    let guar = self.dcx().emit_err(NestedCVariadicType { span: lo });
+                    TyKind::Err(guar)
+                }
+            }
+        } else if self.check_keyword(exp!(Unsafe))
+            && self.look_ahead(1, |tok| tok.kind == token::Lt)
+        {
+            self.parse_unsafe_binder_ty()?
+        } else {
+            let msg = format!("expected type, found {}", super::token_descr(&self.token));
+            let mut err = self.dcx().struct_span_err(lo, msg);
+            err.span_label(lo, "expected type");
+            return Err(err);
+        };
+
+        let span = lo.to(self.prev_token.span);
+        let mut ty = self.mk_ty(span, kind);
+
+        // Try to recover from use of `+` with incorrect priority.
+        match allow_plus {
+            AllowPlus::Yes => self.maybe_recover_from_bad_type_plus(&ty)?,
+            AllowPlus::No => self.maybe_report_ambiguous_plus(impl_dyn_multi, &ty),
+        }
+        if let RecoverQuestionMark::Yes = recover_question_mark {
+            ty = self.maybe_recover_from_question_mark(ty);
+        }
+        if allow_qpath_recovery { self.maybe_recover_from_bad_qpath(ty) } else { Ok(ty) }
+    }
+
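+    /// Parses an unsafe binder type such as `unsafe<'a> &'a i32` (illustrative;
+    /// gated by the `unsafe_binders` feature). The `unsafe` keyword has been
+    /// checked, but not yet eaten, by the caller.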
+    fn parse_unsafe_binder_ty(&mut self) -> PResult<'a, TyKind> {
+        let lo = self.token.span;
+        assert!(self.eat_keyword(exp!(Unsafe)));
+        self.expect_lt()?;
+        let generic_params = self.parse_generic_params()?;
+        self.expect_gt()?;
+        let inner_ty = self.parse_ty()?;
+        let span = lo.to(self.prev_token.span);
+        self.psess.gated_spans.gate(sym::unsafe_binders, span);
+
+        Ok(TyKind::UnsafeBinder(P(UnsafeBinderTy { generic_params, inner_ty })))
+    }
+
+    /// Parses either:
+    /// - `(TYPE)`, a parenthesized type.
+    /// - `(TYPE,)`, a tuple with a single field of type TYPE.
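+    ///
+    /// For example (illustrative):
+    /// ```ignore (illustrative)
+    /// let _: (u8);  // parenthesized type, equivalent to `u8`
+    /// let _: (u8,); // one-element tuple type
+    /// ```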
+    fn parse_ty_tuple_or_parens(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> {
+        let mut trailing_plus = false;
+        let (ts, trailing) = self.parse_paren_comma_seq(|p| {
+            let ty = p.parse_ty()?;
+            trailing_plus = p.prev_token == TokenKind::Plus;
+            Ok(ty)
+        })?;
+
+        if ts.len() == 1 && matches!(trailing, Trailing::No) {
+            let ty = ts.into_iter().next().unwrap();
+            let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus();
+            match ty.kind {
+                // `(TY_BOUND_NOPAREN) + BOUND + ...`.
+                TyKind::Path(None, path) if maybe_bounds => self.parse_remaining_bounds_path(
+                    ThinVec::new(),
+                    path,
+                    lo,
+                    true,
+                    ast::Parens::Yes,
+                ),
+                // For `('a) + …`, we know that `'a` in type position already led to an error being
+                // emitted. To reduce output, let's indirectly suppress E0178 (bad `+` in type) and
+                // other irrelevant consequential errors.
+                TyKind::TraitObject(bounds, TraitObjectSyntax::None)
+                    if maybe_bounds && bounds.len() == 1 && !trailing_plus =>
+                {
+                    self.parse_remaining_bounds(bounds, true)
+                }
+                // `(TYPE)`
+                _ => Ok(TyKind::Paren(ty)),
+            }
+        } else {
+            Ok(TyKind::Tup(ts))
+        }
+    }
+
+    fn parse_bare_trait_object(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> {
+        // A lifetime only begins a bare trait object type if it is followed by `+`!
+        if self.token.is_lifetime() && !self.look_ahead(1, |t| t.is_like_plus()) {
+            // In Rust 2021 and beyond, we assume that the user didn't intend to write a bare trait
+            // object type with a leading lifetime bound since that seems very unlikely given the
+            // fact that `dyn`-less trait objects are *semantically* invalid.
+            if self.psess.edition.at_least_rust_2021() {
+                let lt = self.expect_lifetime();
+                let mut err = self.dcx().struct_span_err(lo, "expected type, found lifetime");
+                err.span_label(lo, "expected type");
+                return Ok(match self.maybe_recover_ref_ty_no_leading_ampersand(lt, lo, err) {
+                    Ok(ref_ty) => ref_ty,
+                    Err(err) => TyKind::Err(err.emit()),
+                });
+            }
+
+            self.dcx().emit_err(NeedPlusAfterTraitObjectLifetime {
+                span: lo,
+                suggestion: lo.shrink_to_hi(),
+            });
+        }
+        Ok(TyKind::TraitObject(
+            self.parse_generic_bounds_common(allow_plus)?,
+            TraitObjectSyntax::None,
+        ))
+    }
+
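+    /// Attempts to recover a reference type written without its leading `&`,
+    /// e.g. reinterpreting `'a mut Ty` as `&'a mut Ty` (illustrative), after the
+    /// lifetime has already triggered an "expected type, found lifetime" error.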
+    fn maybe_recover_ref_ty_no_leading_ampersand<'cx>(
+        &mut self,
+        lt: Lifetime,
+        lo: Span,
+        mut err: Diag<'cx>,
+    ) -> Result<TyKind, Diag<'cx>> {
+        if !self.may_recover() {
+            return Err(err);
+        }
+        let snapshot = self.create_snapshot_for_diagnostic();
+        let mutbl = self.parse_mutability();
+        match self.parse_ty_no_plus() {
+            Ok(ty) => {
+                err.span_suggestion_verbose(
+                    lo.shrink_to_lo(),
+                    "you might have meant to write a reference type here",
+                    "&",
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit();
+                Ok(TyKind::Ref(Some(lt), MutTy { ty, mutbl }))
+            }
+            Err(diag) => {
+                diag.cancel();
+                self.restore_snapshot(snapshot);
+                Err(err)
+            }
+        }
+    }
+
+    fn parse_remaining_bounds_path(
+        &mut self,
+        generic_params: ThinVec<GenericParam>,
+        path: ast::Path,
+        lo: Span,
+        parse_plus: bool,
+        parens: ast::Parens,
+    ) -> PResult<'a, TyKind> {
+        let poly_trait_ref = PolyTraitRef::new(
+            generic_params,
+            path,
+            TraitBoundModifiers::NONE,
+            lo.to(self.prev_token.span),
+            parens,
+        );
+        let bounds = vec![GenericBound::Trait(poly_trait_ref)];
+        self.parse_remaining_bounds(bounds, parse_plus)
+    }
+
+    /// Parse the remainder of a bare trait object type given an already parsed list.
+    fn parse_remaining_bounds(
+        &mut self,
+        mut bounds: GenericBounds,
+        plus: bool,
+    ) -> PResult<'a, TyKind> {
+        if plus {
+            self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
+            bounds.append(&mut self.parse_generic_bounds()?);
+        }
+        Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
+    }
+
+    /// Parses a raw pointer type: `*[const | mut] $type`.
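+    ///
+    /// For example (illustrative): `*const u8` and `*mut [i32]` are raw pointer
+    /// types; a bare `*u8` is recovered with an error suggesting `const` or `mut`.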
+    fn parse_ty_ptr(&mut self) -> PResult<'a, TyKind> {
+        let mutbl = self.parse_const_or_mut().unwrap_or_else(|| {
+            let span = self.prev_token.span;
+            self.dcx().emit_err(ExpectedMutOrConstInRawPointerType {
+                span,
+                after_asterisk: span.shrink_to_hi(),
+            });
+            Mutability::Not
+        });
+        let ty = self.parse_ty_no_plus()?;
+        Ok(TyKind::Ptr(MutTy { ty, mutbl }))
+    }
+
+    /// Parses an array (`[TYPE; EXPR]`) or slice (`[TYPE]`) type.
+    /// The opening `[` bracket is already eaten.
+    fn parse_array_or_slice_ty(&mut self) -> PResult<'a, TyKind> {
+        let elt_ty = match self.parse_ty() {
+            Ok(ty) => ty,
+            Err(err)
+                if self.look_ahead(1, |t| *t == token::CloseBracket)
+                    || self.look_ahead(1, |t| *t == token::Semi) =>
+            {
+                // Recover from `[LIT; EXPR]` and `[LIT]`
+                self.bump();
+                let guar = err.emit();
+                self.mk_ty(self.prev_token.span, TyKind::Err(guar))
+            }
+            Err(err) => return Err(err),
+        };
+
+        let ty = if self.eat(exp!(Semi)) {
+            let mut length = self.parse_expr_anon_const()?;
+            if let Err(e) = self.expect(exp!(CloseBracket)) {
+                // Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
+                self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
+                self.expect(exp!(CloseBracket))?;
+            }
+            TyKind::Array(elt_ty, length)
+        } else if self.eat(exp!(CloseBracket)) {
+            TyKind::Slice(elt_ty)
+        } else {
+            self.maybe_recover_array_ty_without_semi(elt_ty)?
+        };
+
+        Ok(ty)
+    }
+
+    /// Recover from malformed array type syntax.
+    ///
+    /// This method attempts to recover from cases like:
+    /// - `[u8, 5]` → suggests using `;` and returns an array type
+    /// - `[u8 5]` → suggests using `;` and returns an array type
+    ///
+    /// Consider adding more cases in the future.
+    fn maybe_recover_array_ty_without_semi(&mut self, elt_ty: P<Ty>) -> PResult<'a, TyKind> {
+        let span = self.token.span;
+        let token_descr = super::token_descr(&self.token);
+        let mut err =
+            self.dcx().struct_span_err(span, format!("expected `;` or `]`, found {}", token_descr));
+        err.span_label(span, "expected `;` or `]`");
+        err.note("you might have meant to write a slice or array type");
+
+        // If we cannot recover, return the error immediately.
+        if !self.may_recover() {
+            return Err(err);
+        }
+
+        let snapshot = self.create_snapshot_for_diagnostic();
+
+        let suggestion_span = if self.eat(exp!(Comma)) || self.eat(exp!(Star)) {
+            // Consume common erroneous separators.
+            self.prev_token.span
+        } else {
+            self.token.span.shrink_to_lo()
+        };
+
+        // We first try to parse a pattern like `[u8 5]`.
+        let length = match self.parse_expr_anon_const() {
+            Ok(length) => length,
+            Err(e) => {
+                e.cancel();
+                self.restore_snapshot(snapshot);
+                return Err(err);
+            }
+        };
+
+        if let Err(e) = self.expect(exp!(CloseBracket)) {
+            e.cancel();
+            self.restore_snapshot(snapshot);
+            return Err(err);
+        }
+
+        err.span_suggestion_verbose(
+            suggestion_span,
+            "you might have meant to use `;` as the separator",
+            ";",
+            Applicability::MaybeIncorrect,
+        );
+        err.emit();
+        Ok(TyKind::Array(elt_ty, length))
+    }
+
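+    /// Parses the pointee of a reference type, i.e. everything after the `&` or
+    /// `&&` in e.g. `&'a mut T` (illustrative): an optional lifetime, optional
+    /// `pin`/`mut` modifiers, and the referent type.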
+    fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
+        let and_span = self.prev_token.span;
+        let mut opt_lifetime = self.check_lifetime().then(|| self.expect_lifetime());
+        let (pinned, mut mutbl) = match self.parse_pin_and_mut() {
+            Some(pin_mut) => pin_mut,
+            None => (Pinnedness::Not, self.parse_mutability()),
+        };
+        if self.token.is_lifetime() && mutbl == Mutability::Mut && opt_lifetime.is_none() {
+            // A lifetime is invalid here: it would be part of a bare trait bound, which requires
+            // it to be followed by a plus, but we disallow plus in the pointee type.
+            // So we can handle this case as an error here, and suggest `'a mut`.
+            // If there *is* a plus next though, handling the error later provides better suggestions
+            // (like adding parentheses)
+            if !self.look_ahead(1, |t| t.is_like_plus()) {
+                let lifetime_span = self.token.span;
+                let span = and_span.to(lifetime_span);
+
+                let (suggest_lifetime, snippet) =
+                    if let Ok(lifetime_src) = self.span_to_snippet(lifetime_span) {
+                        (Some(span), lifetime_src)
+                    } else {
+                        (None, String::new())
+                    };
+                self.dcx().emit_err(LifetimeAfterMut { span, suggest_lifetime, snippet });
+
+                opt_lifetime = Some(self.expect_lifetime());
+            }
+        } else if self.token.is_keyword(kw::Dyn)
+            && mutbl == Mutability::Not
+            && self.look_ahead(1, |t| t.is_keyword(kw::Mut))
+        {
+            // We have `&dyn mut ...`, which is invalid and should be `&mut dyn ...`.
+            let span = and_span.to(self.look_ahead(1, |t| t.span));
+            self.dcx().emit_err(DynAfterMut { span });
+
+            // Recovery
+            mutbl = Mutability::Mut;
+            let (dyn_tok, dyn_tok_sp) = (self.token, self.token_spacing);
+            self.bump();
+            self.bump_with((dyn_tok, dyn_tok_sp));
+        }
+        let ty = self.parse_ty_no_plus()?;
+        Ok(match pinned {
+            Pinnedness::Not => TyKind::Ref(opt_lifetime, MutTy { ty, mutbl }),
+            Pinnedness::Pinned => TyKind::PinnedRef(opt_lifetime, MutTy { ty, mutbl }),
+        })
+    }
+
+    /// Parses `pin` and `mut` annotations on references.
+    ///
+    /// It must be either `pin const` or `pin mut`.
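+    ///
+    /// For example (illustrative, gated by the `pin_ergonomics` feature):
+    /// `&pin mut T` and `&'a pin const T`.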
+    pub(crate) fn parse_pin_and_mut(&mut self) -> Option<(Pinnedness, Mutability)> {
+        if self.token.is_ident_named(sym::pin) {
+            let result = self.look_ahead(1, |token| {
+                if token.is_keyword(kw::Const) {
+                    Some((Pinnedness::Pinned, Mutability::Not))
+                } else if token.is_keyword(kw::Mut) {
+                    Some((Pinnedness::Pinned, Mutability::Mut))
+                } else {
+                    None
+                }
+            });
+            if result.is_some() {
+                self.psess.gated_spans.gate(sym::pin_ergonomics, self.token.span);
+                self.bump();
+                self.bump();
+            }
+            result
+        } else {
+            None
+        }
+    }
+
+    // Parses a `typeof(EXPR)` type.
+    // To avoid ambiguity, the expression is surrounded by parentheses.
+    fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
+        self.expect(exp!(OpenParen))?;
+        let expr = self.parse_expr_anon_const()?;
+        self.expect(exp!(CloseParen))?;
+        Ok(TyKind::Typeof(expr))
+    }
+
+    /// Parses a function pointer type (`TyKind::FnPtr`).
+    /// ```ignore (illustrative)
+    ///    [unsafe] [extern "ABI"] fn (S) -> T
+    /// //  ^~~~~^          ^~~~^     ^~^    ^
+    /// //    |               |        |     |
+    /// //    |               |        |   Return type
+    /// // Function Style    ABI  Parameter types
+    /// ```
+    /// We actually parse `FnHeader FnDecl`, but we error on `const` and `async` qualifiers.
+    fn parse_ty_fn_ptr(
+        &mut self,
+        lo: Span,
+        mut params: ThinVec<GenericParam>,
+        param_insertion_point: Option<Span>,
+        recover_return_sign: RecoverReturnSign,
+    ) -> PResult<'a, TyKind> {
+        let inherited_vis = rustc_ast::Visibility {
+            span: rustc_span::DUMMY_SP,
+            kind: rustc_ast::VisibilityKind::Inherited,
+            tokens: None,
+        };
+        let span_start = self.token.span;
+        let ast::FnHeader { ext, safety, .. } = self.parse_fn_front_matter(
+            &inherited_vis,
+            Case::Sensitive,
+            FrontMatterParsingMode::FunctionPtrType,
+        )?;
+        if self.may_recover() && self.token == TokenKind::Lt {
+            self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?;
+        }
+        let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?;
+
+        let decl_span = span_start.to(self.prev_token.span);
+        Ok(TyKind::FnPtr(P(FnPtrTy { ext, safety, generic_params: params, decl, decl_span })))
+    }
+
+    /// Recover from function pointer types with a generic parameter list (e.g. `fn<'a>(&'a str)`).
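+    ///
+    /// The recovery suggests moving the parameters into a higher-ranked binder,
+    /// e.g. `for<'a> fn(&'a str)` (illustrative), or into an existing `for<...>`
+    /// list when one is already present.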
+    fn recover_fn_ptr_with_generics(
+        &mut self,
+        lo: Span,
+        params: &mut ThinVec<GenericParam>,
+        param_insertion_point: Option<Span>,
+    ) -> PResult<'a, ()> {
+        let generics = self.parse_generics()?;
+        let arity = generics.params.len();
+
+        let mut lifetimes: ThinVec<_> = generics
+            .params
+            .into_iter()
+            .filter(|param| matches!(param.kind, ast::GenericParamKind::Lifetime))
+            .collect();
+
+        let sugg = if !lifetimes.is_empty() {
+            let snippet =
+                lifetimes.iter().map(|param| param.ident.as_str()).intersperse(", ").collect();
+
+            let (left, snippet) = if let Some(span) = param_insertion_point {
+                (span, if params.is_empty() { snippet } else { format!(", {snippet}") })
+            } else {
+                (lo.shrink_to_lo(), format!("for<{snippet}> "))
+            };
+
+            Some(FnPtrWithGenericsSugg {
+                left,
+                snippet,
+                right: generics.span,
+                arity,
+                for_param_list_exists: param_insertion_point.is_some(),
+            })
+        } else {
+            None
+        };
+
+        self.dcx().emit_err(FnPtrWithGenerics { span: generics.span, sugg });
+        params.append(&mut lifetimes);
+        Ok(())
+    }
+
+    /// Parses an `impl B0 + ... + Bn` type.
+    fn parse_impl_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
+        if self.token.is_lifetime() {
+            self.look_ahead(1, |t| {
+                if let token::Ident(sym, _) = t.kind {
+                    // When parsing a pattern like `'a Sized`, we should give a
+                    // suggestion like `'a + Sized`.
+                    self.dcx().emit_err(errors::MissingPlusBounds {
+                        span: self.token.span,
+                        hi: self.token.span.shrink_to_hi(),
+                        sym,
+                    });
+                }
+            })
+        }
+
+        // Always parse bounds greedily for better error recovery.
+        let bounds = self.parse_generic_bounds()?;
+
+        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus;
+
+        Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
+    }
+
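+    /// Parses the argument list of a precise-capturing `use<...>` bound, e.g. the
+    /// `'a, T` in `use<'a, T>` (illustrative). The caller has already eaten the
+    /// `use` keyword; this consumes the `<`, the arguments, and the closing `>`.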
+    fn parse_precise_capturing_args(
+        &mut self,
+    ) -> PResult<'a, (ThinVec<PreciseCapturingArg>, Span)> {
+        let lo = self.token.span;
+        self.expect_lt()?;
+        let (args, _, _) = self.parse_seq_to_before_tokens(
+            &[exp!(Gt)],
+            &[&TokenKind::Ge, &TokenKind::Shr, &TokenKind::ShrEq],
+            SeqSep::trailing_allowed(exp!(Comma)),
+            |self_| {
+                if self_.check_keyword(exp!(SelfUpper)) {
+                    self_.bump();
+                    Ok(PreciseCapturingArg::Arg(
+                        ast::Path::from_ident(self_.prev_token.ident().unwrap().0),
+                        DUMMY_NODE_ID,
+                    ))
+                } else if self_.check_ident() {
+                    Ok(PreciseCapturingArg::Arg(
+                        ast::Path::from_ident(self_.parse_ident()?),
+                        DUMMY_NODE_ID,
+                    ))
+                } else if self_.check_lifetime() {
+                    Ok(PreciseCapturingArg::Lifetime(self_.expect_lifetime()))
+                } else {
+                    self_.unexpected_any()
+                }
+            },
+        )?;
+        self.expect_gt()?;
+        Ok((args, lo.to(self.prev_token.span)))
+    }
+
+    /// Is a `dyn B0 + ... + Bn` type allowed here?
+    fn is_explicit_dyn_type(&mut self) -> bool {
+        self.check_keyword(exp!(Dyn))
+            && (self.token_uninterpolated_span().at_least_rust_2018()
+                || self.look_ahead(1, |t| {
+                    (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::Star)
+                        && !can_continue_type_after_non_fn_ident(t)
+                }))
+    }
+
+    /// Parses a `dyn B0 + ... + Bn` type.
+    ///
+    /// Note that this does *not* parse bare trait objects.
+    fn parse_dyn_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
+        self.bump(); // `dyn`
+
+        // We used to parse `*` for `dyn*` here.
+        let syntax = TraitObjectSyntax::Dyn;
+
+        // Always parse bounds greedily for better error recovery.
+        let bounds = self.parse_generic_bounds()?;
+        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus;
+        Ok(TyKind::TraitObject(bounds, syntax))
+    }
+
+    /// Parses a type starting with a path.
+    ///
+    /// This can be:
+    /// 1. a type macro, `mac!(...)`,
+    /// 2. a bare trait object, `B0 + ... + Bn`,
+    /// 3. or a path, `path::to::MyType`.
+    fn parse_path_start_ty(
+        &mut self,
+        lo: Span,
+        allow_plus: AllowPlus,
+        ty_generics: Option<&Generics>,
+    ) -> PResult<'a, TyKind> {
+        // Simple path
+        let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
+        if self.eat(exp!(Bang)) {
+            // Macro invocation in type position
+            Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
+        } else if allow_plus == AllowPlus::Yes && self.check_plus() {
+            // `Trait1 + Trait2 + 'a`
+            self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true, ast::Parens::No)
+        } else {
+            // Just a type path.
+            Ok(TyKind::Path(None, path))
+        }
+    }
+
+    pub(super) fn parse_generic_bounds(&mut self) -> PResult<'a, GenericBounds> {
+        self.parse_generic_bounds_common(AllowPlus::Yes)
+    }
+
+    /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
+    ///
+    /// See `parse_generic_bound` for the `BOUND` grammar.
+    fn parse_generic_bounds_common(&mut self, allow_plus: AllowPlus) -> PResult<'a, GenericBounds> {
+        let mut bounds = Vec::new();
+
+        // In addition to looping while we find generic bounds:
+        // We continue even if we find a keyword. This is necessary for error recovery on,
+        // for example, `impl fn()`. The only keyword that can go after generic bounds is
+        // `where`, so stop if we see it.
+        // We also continue if we find types (not traits), again for error recovery.
+        while self.can_begin_bound()
+            || (self.may_recover()
+                && (self.token.can_begin_type()
+                    || (self.token.is_reserved_ident() && !self.token.is_keyword(kw::Where))))
+        {
+            if self.token.is_keyword(kw::Dyn) {
+                // Account for `&dyn Trait + dyn Other`.
+                self.bump();
+                self.dcx().emit_err(InvalidDynKeyword {
+                    span: self.prev_token.span,
+                    suggestion: self.prev_token.span.until(self.token.span),
+                });
+            }
+            bounds.push(self.parse_generic_bound()?);
+            if allow_plus == AllowPlus::No || !self.eat_plus() {
+                break;
+            }
+        }
+
+        Ok(bounds)
+    }
+
+    /// Can the current token begin a bound?
+    fn can_begin_bound(&mut self) -> bool {
+        self.check_path()
+            || self.check_lifetime()
+            || self.check(exp!(Bang))
+            || self.check(exp!(Question))
+            || self.check(exp!(Tilde))
+            || self.check_keyword(exp!(For))
+            || self.check(exp!(OpenParen))
+            || self.check(exp!(OpenBracket))
+            || self.check_keyword(exp!(Const))
+            || self.check_keyword(exp!(Async))
+            || self.check_keyword(exp!(Use))
+    }
+
+    /// Parses a bound according to the grammar:
+    /// ```ebnf
+    /// BOUND = TY_BOUND | LT_BOUND
+    /// ```
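+    ///
+    /// For example (illustrative), in `T: 'a + Clone`, `'a` is parsed as an
+    /// `LT_BOUND` and `Clone` as a `TY_BOUND`.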
+    fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
+        let lo = self.token.span;
+        let leading_token = self.prev_token;
+        let parens = if self.eat(exp!(OpenParen)) { ast::Parens::Yes } else { ast::Parens::No };
+
+        let bound = if self.token.is_lifetime() {
+            self.parse_generic_lt_bound(lo, parens)?
+        } else if self.eat_keyword(exp!(Use)) {
+            // parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of
+            // lifetimes and ident params (including SelfUpper). These are validated later
+            // for order, duplication, and whether they actually reference params.
+            let use_span = self.prev_token.span;
+            let (args, args_span) = self.parse_precise_capturing_args()?;
+            GenericBound::Use(args, use_span.to(args_span))
+        } else {
+            self.parse_generic_ty_bound(lo, parens, &leading_token)?
+        };
+
+        Ok(bound)
+    }
+
+    /// Parses a lifetime ("outlives") bound, e.g. `'a`, according to:
+    /// ```ebnf
+    /// LT_BOUND = LIFETIME
+    /// ```
+    fn parse_generic_lt_bound(
+        &mut self,
+        lo: Span,
+        parens: ast::Parens,
+    ) -> PResult<'a, GenericBound> {
+        let lt = self.expect_lifetime();
+        let bound = GenericBound::Outlives(lt);
+        if let ast::Parens::Yes = parens {
+            // FIXME(Centril): Consider not erroring here and accepting `('lt)` instead,
+            // possibly introducing `GenericBound::Paren(P<GenericBound>)`?
+            self.recover_paren_lifetime(lo)?;
+        }
+        Ok(bound)
+    }
+
+    /// Emits an error if any trait bound modifiers were present.
+    fn error_lt_bound_with_modifiers(
+        &self,
+        modifiers: TraitBoundModifiers,
+        binder_span: Option<Span>,
+    ) -> ErrorGuaranteed {
+        let TraitBoundModifiers { constness, asyncness, polarity } = modifiers;
+
+        match constness {
+            BoundConstness::Never => {}
+            BoundConstness::Always(span) | BoundConstness::Maybe(span) => {
+                return self
+                    .dcx()
+                    .emit_err(errors::ModifierLifetime { span, modifier: constness.as_str() });
+            }
+        }
+
+        match polarity {
+            BoundPolarity::Positive => {}
+            BoundPolarity::Negative(span) | BoundPolarity::Maybe(span) => {
+                return self
+                    .dcx()
+                    .emit_err(errors::ModifierLifetime { span, modifier: polarity.as_str() });
+            }
+        }
+
+        match asyncness {
+            BoundAsyncness::Normal => {}
+            BoundAsyncness::Async(span) => {
+                return self
+                    .dcx()
+                    .emit_err(errors::ModifierLifetime { span, modifier: asyncness.as_str() });
+            }
+        }
+
+        if let Some(span) = binder_span {
+            return self.dcx().emit_err(errors::ModifierLifetime { span, modifier: "for<...>" });
+        }
+
+        unreachable!("lifetime bound intercepted in `parse_generic_ty_bound` but no modifiers?")
+    }
+
+    /// Recover on `('lifetime)` with `(` already eaten.
+    fn recover_paren_lifetime(&mut self, lo: Span) -> PResult<'a, ()> {
+        self.expect(exp!(CloseParen))?;
+        let span = lo.to(self.prev_token.span);
+        let sugg = errors::RemoveParens { lo, hi: self.prev_token.span };
+
+        self.dcx().emit_err(errors::ParenthesizedLifetime { span, sugg });
+        Ok(())
+    }
+
+    /// Parses the modifiers that may precede a trait in a bound, e.g. `?Trait` or `[const] Trait`.
+    ///
+    /// If no modifiers are present, this does not consume any tokens.
+    ///
+    /// ```ebnf
+    /// CONSTNESS = [["["] "const" ["]"]]
+    /// ASYNCNESS = ["async"]
+    /// POLARITY = ["?" | "!"]
+    /// ```
+    ///
+    /// See `parse_generic_ty_bound` for the complete grammar of trait bound modifiers.
+    fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> {
+        let modifier_lo = self.token.span;
+        let constness = self.parse_bound_constness()?;
+
+        let asyncness = if self.token_uninterpolated_span().at_least_rust_2018()
+            && self.eat_keyword(exp!(Async))
+        {
+            self.psess.gated_spans.gate(sym::async_trait_bounds, self.prev_token.span);
+            BoundAsyncness::Async(self.prev_token.span)
+        } else if self.may_recover()
+            && self.token_uninterpolated_span().is_rust_2015()
+            && self.is_kw_followed_by_ident(kw::Async)
+        {
+            self.bump(); // eat `async`
+            self.dcx().emit_err(errors::AsyncBoundModifierIn2015 {
+                span: self.prev_token.span,
+                help: HelpUseLatestEdition::new(),
+            });
+            self.psess.gated_spans.gate(sym::async_trait_bounds, self.prev_token.span);
+            BoundAsyncness::Async(self.prev_token.span)
+        } else {
+            BoundAsyncness::Normal
+        };
+        let modifier_hi = self.prev_token.span;
+
+        let polarity = if self.eat(exp!(Question)) {
+            BoundPolarity::Maybe(self.prev_token.span)
+        } else if self.eat(exp!(Bang)) {
+            self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
+            BoundPolarity::Negative(self.prev_token.span)
+        } else {
+            BoundPolarity::Positive
+        };
+
+        // Enforce the mutual-exclusivity of `const`/`async` and `?`/`!`.
+        match polarity {
+            BoundPolarity::Positive => {
+                // All trait bound modifiers allowed to combine with positive polarity
+            }
+            BoundPolarity::Maybe(polarity_span) | BoundPolarity::Negative(polarity_span) => {
+                match (asyncness, constness) {
+                    (BoundAsyncness::Normal, BoundConstness::Never) => {
+                        // Ok, no modifiers.
+                    }
+                    (_, _) => {
+                        let constness = constness.as_str();
+                        let asyncness = asyncness.as_str();
+                        let glue =
+                            if !constness.is_empty() && !asyncness.is_empty() { " " } else { "" };
+                        let modifiers_concatenated = format!("{constness}{glue}{asyncness}");
+                        self.dcx().emit_err(errors::PolarityAndModifiers {
+                            polarity_span,
+                            polarity: polarity.as_str(),
+                            modifiers_span: modifier_lo.to(modifier_hi),
+                            modifiers_concatenated,
+                        });
+                    }
+                }
+            }
+        }
+
+        Ok(TraitBoundModifiers { constness, asyncness, polarity })
+    }
+
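+    /// Parses the optional constness of a bound: the legacy `~const`, the
+    /// bracketed `[const]`, or plain `const` (illustrative examples:
+    /// `T: [const] Trait`, `T: const Trait`), all gated by `const_trait_impl`.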
+    pub fn parse_bound_constness(&mut self) -> PResult<'a, BoundConstness> {
+        // FIXME(const_trait_impl): remove `~const` parser support once bootstrap has the new syntax
+        // in rustfmt
+        Ok(if self.eat(exp!(Tilde)) {
+            let tilde = self.prev_token.span;
+            self.expect_keyword(exp!(Const))?;
+            let span = tilde.to(self.prev_token.span);
+            self.psess.gated_spans.gate(sym::const_trait_impl, span);
+            BoundConstness::Maybe(span)
+        } else if self.check(exp!(OpenBracket))
+            && self.look_ahead(1, |t| t.is_keyword(kw::Const))
+            && self.look_ahead(2, |t| *t == token::CloseBracket)
+        {
+            let start = self.token.span;
+            self.bump();
+            self.expect_keyword(exp!(Const)).unwrap();
+            self.bump();
+            let span = start.to(self.prev_token.span);
+            self.psess.gated_spans.gate(sym::const_trait_impl, span);
+            BoundConstness::Maybe(span)
+        } else if self.eat_keyword(exp!(Const)) {
+            self.psess.gated_spans.gate(sym::const_trait_impl, self.prev_token.span);
+            BoundConstness::Always(self.prev_token.span)
+        } else {
+            BoundConstness::Never
+        })
+    }
+
+    /// Parses a type bound according to:
+    /// ```ebnf
+    /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
+    /// TY_BOUND_NOPAREN = [for<GENERIC_PARAMS> CONSTNESS ASYNCNESS | POLARITY] SIMPLE_PATH
+    /// ```
+    ///
+    /// For example, this grammar accepts `for<'a: 'b> [const] ?m::Trait<'a>`.
+    fn parse_generic_ty_bound(
+        &mut self,
+        lo: Span,
+        parens: ast::Parens,
+        leading_token: &Token,
+    ) -> PResult<'a, GenericBound> {
+        let (mut lifetime_defs, binder_span) = self.parse_late_bound_lifetime_defs()?;
+
+        let modifiers_lo = self.token.span;
+        let modifiers = self.parse_trait_bound_modifiers()?;
+        let modifiers_span = modifiers_lo.to(self.prev_token.span);
+
+        if let Some(binder_span) = binder_span {
+            match modifiers.polarity {
+                BoundPolarity::Negative(polarity_span) | BoundPolarity::Maybe(polarity_span) => {
+                    self.dcx().emit_err(errors::BinderAndPolarity {
+                        binder_span,
+                        polarity_span,
+                        polarity: modifiers.polarity.as_str(),
+                    });
+                }
+                BoundPolarity::Positive => {}
+            }
+        }
+
+        // Recover erroneous lifetime bound with modifiers or binder.
+        // e.g. `T: for<'a> 'a` or `T: [const] 'a`.
+        if self.token.is_lifetime() {
+            let _: ErrorGuaranteed = self.error_lt_bound_with_modifiers(modifiers, binder_span);
+            return self.parse_generic_lt_bound(lo, parens);
+        }
+
+        if let (more_lifetime_defs, Some(binder_span)) = self.parse_late_bound_lifetime_defs()? {
+            lifetime_defs.extend(more_lifetime_defs);
+            self.dcx().emit_err(errors::BinderBeforeModifiers { binder_span, modifiers_span });
+        }
+
+        let mut path = if self.token.is_keyword(kw::Fn)
+            && self.look_ahead(1, |t| *t == TokenKind::OpenParen)
+            && let Some(path) = self.recover_path_from_fn()
+        {
+            path
+        } else if !self.token.is_path_start() && self.token.can_begin_type() {
+            let ty = self.parse_ty_no_plus()?;
+            // Instead of finding a path (a trait), we found a type.
+            let mut err = self.dcx().struct_span_err(ty.span, "expected a trait, found type");
+
+            // If we can recover, try to extract a path from the type. Note
+            // that we do not use the try operator when parsing the type because
+            // if it fails then we get a parser error which we don't want (we're trying
+            // to recover from errors, not make more).
+            let path = if self.may_recover() {
+                let (span, message, sugg, path, applicability) = match &ty.kind {
+                    TyKind::Ptr(..) | TyKind::Ref(..)
+                        if let TyKind::Path(_, path) = &ty.peel_refs().kind =>
+                    {
+                        (
+                            ty.span.until(path.span),
+                            "consider removing the indirection",
+                            "",
+                            path,
+                            Applicability::MaybeIncorrect,
+                        )
+                    }
+                    TyKind::ImplTrait(_, bounds)
+                        if let [GenericBound::Trait(tr, ..), ..] = bounds.as_slice() =>
+                    {
+                        (
+                            ty.span.until(tr.span),
+                            "use the trait bounds directly",
+                            "",
+                            &tr.trait_ref.path,
+                            Applicability::MachineApplicable,
+                        )
+                    }
+                    _ => return Err(err),
+                };
+
+                err.span_suggestion_verbose(span, message, sugg, applicability);
+
+                path.clone()
+            } else {
+                return Err(err);
+            };
+
+            err.emit();
+
+            path
+        } else {
+            self.parse_path(PathStyle::Type)?
+        };
+
+        if self.may_recover() && self.token == TokenKind::OpenParen {
+            self.recover_fn_trait_with_lifetime_params(&mut path, &mut lifetime_defs)?;
+        }
+
+        if let ast::Parens::Yes = parens {
+            // Someone has written something like `&dyn (Trait + Other)`. The correct code
+            // would be `&(dyn Trait + Other)`.
+            if self.token.is_like_plus() && leading_token.is_keyword(kw::Dyn) {
+                let bounds = vec![];
+                self.parse_remaining_bounds(bounds, true)?;
+                self.expect(exp!(CloseParen))?;
+                self.dcx().emit_err(errors::IncorrectParensTraitBounds {
+                    span: vec![lo, self.prev_token.span],
+                    sugg: errors::IncorrectParensTraitBoundsSugg {
+                        wrong_span: leading_token.span.shrink_to_hi().to(lo),
+                        new_span: leading_token.span.shrink_to_lo(),
+                    },
+                });
+            } else {
+                self.expect(exp!(CloseParen))?;
+            }
+        }
+
+        let poly_trait =
+            PolyTraitRef::new(lifetime_defs, path, modifiers, lo.to(self.prev_token.span), parens);
+        Ok(GenericBound::Trait(poly_trait))
+    }
+
+    // Recovers a parenthesized-style `Fn(..)` path from `fn(..)`.
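+    // For example (illustrative), a bound written as `T: fn(u8) -> bool` is recovered
+    // as if it were `T: Fn(u8) -> bool`, after emitting `ExpectedFnPathFoundFnKeyword`.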
+    fn recover_path_from_fn(&mut self) -> Option<ast::Path> {
+        let fn_token_span = self.token.span;
+        self.bump();
+        let args_lo = self.token.span;
+        let snapshot = self.create_snapshot_for_diagnostic();
+        match self.parse_fn_decl(|_| false, AllowPlus::No, RecoverReturnSign::OnlyFatArrow) {
+            Ok(decl) => {
+                self.dcx().emit_err(ExpectedFnPathFoundFnKeyword { fn_token_span });
+                Some(ast::Path {
+                    span: fn_token_span.to(self.prev_token.span),
+                    segments: thin_vec![ast::PathSegment {
+                        ident: Ident::new(sym::Fn, fn_token_span),
+                        id: DUMMY_NODE_ID,
+                        args: Some(P(ast::GenericArgs::Parenthesized(ast::ParenthesizedArgs {
+                            span: args_lo.to(self.prev_token.span),
+                            inputs: decl.inputs.iter().map(|a| a.ty.clone()).collect(),
+                            inputs_span: args_lo.until(decl.output.span()),
+                            output: decl.output.clone(),
+                        }))),
+                    }],
+                    tokens: None,
+                })
+            }
+            Err(diag) => {
+                diag.cancel();
+                self.restore_snapshot(snapshot);
+                None
+            }
+        }
+    }
+
+    /// Optionally parses `for<$generic_params>`.
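+    ///
+    /// For example (illustrative), `for<'a, 'b>` yields two lifetime `GenericParam`s
+    /// together with `Some(span)` covering the parameter list, while the absence of a
+    /// `for<...>` binder yields `(empty, None)`.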
+    pub(super) fn parse_late_bound_lifetime_defs(
+        &mut self,
+    ) -> PResult<'a, (ThinVec<GenericParam>, Option<Span>)> {
+        if self.eat_keyword(exp!(For)) {
+            let lo = self.token.span;
+            self.expect_lt()?;
+            let params = self.parse_generic_params()?;
+            self.expect_gt()?;
+            // We rely on AST validation to rule out invalid cases: There must not be
+            // type or const parameters, and parameters must not have bounds.
+            Ok((params, Some(lo.to(self.prev_token.span))))
+        } else {
+            Ok((ThinVec::new(), None))
+        }
+    }
+
+    /// Recovers from `Fn`-family traits (`Fn`, `FnMut`, `FnOnce`) written with lifetime
+    /// arguments (e.g. `FnOnce<'a>(&'a str) -> bool`). Everything up to and including
+    /// the generic arguments has already been eaten when this is called.
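+    ///
+    /// The recovery moves the lifetimes into a higher-ranked binder, suggesting e.g.
+    /// `for<'a> FnOnce(&'a str) -> bool` in place of `FnOnce<'a>(&'a str) -> bool`.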
+    fn recover_fn_trait_with_lifetime_params(
+        &mut self,
+        fn_path: &mut ast::Path,
+        lifetime_defs: &mut ThinVec<GenericParam>,
+    ) -> PResult<'a, ()> {
+        let fn_path_segment = fn_path.segments.last_mut().unwrap();
+        let generic_args = if let Some(p_args) = &fn_path_segment.args {
+            *p_args.clone()
+        } else {
+            // Normally we shouldn't get here, because the caller should already have
+            // parsed the generic arguments (otherwise it would make no sense to call
+            // this function).
+            return Ok(());
+        };
+        let lifetimes =
+            if let ast::GenericArgs::AngleBracketed(ast::AngleBracketedArgs { span: _, args }) =
+                &generic_args
+            {
+                args.into_iter()
+                    .filter_map(|arg| {
+                        if let ast::AngleBracketedArg::Arg(generic_arg) = arg
+                            && let ast::GenericArg::Lifetime(lifetime) = generic_arg
+                        {
+                            Some(lifetime)
+                        } else {
+                            None
+                        }
+                    })
+                    .collect()
+            } else {
+                Vec::new()
+            };
+        // Only try to recover if the trait has lifetime params.
+        if lifetimes.is_empty() {
+            return Ok(());
+        }
+
+        // Parse `(T, U) -> R`.
+        let inputs_lo = self.token.span;
+        let inputs: ThinVec<_> =
+            self.parse_fn_params(|_| false)?.into_iter().map(|input| input.ty).collect();
+        let inputs_span = inputs_lo.to(self.prev_token.span);
+        let output = self.parse_ret_ty(AllowPlus::No, RecoverQPath::No, RecoverReturnSign::No)?;
+        let args = ast::ParenthesizedArgs {
+            span: fn_path_segment.span().to(self.prev_token.span),
+            inputs,
+            inputs_span,
+            output,
+        }
+        .into();
+        *fn_path_segment = ast::PathSegment {
+            ident: fn_path_segment.ident,
+            args: Some(args),
+            id: ast::DUMMY_NODE_ID,
+        };
+
+        // Convert parsed `<'a>` in `Fn<'a>` into `for<'a>`.
+        let mut generic_params = lifetimes
+            .iter()
+            .map(|lt| GenericParam {
+                id: lt.id,
+                ident: lt.ident,
+                attrs: ast::AttrVec::new(),
+                bounds: Vec::new(),
+                is_placeholder: false,
+                kind: ast::GenericParamKind::Lifetime,
+                colon_span: None,
+            })
+            .collect::<ThinVec<GenericParam>>();
+        lifetime_defs.append(&mut generic_params);
+
+        let generic_args_span = generic_args.span();
+        let snippet = format!(
+            "for<{}> ",
+            lifetimes.iter().map(|lt| lt.ident.as_str()).intersperse(", ").collect::<String>(),
+        );
+        let before_fn_path = fn_path.span.shrink_to_lo();
+        self.dcx()
+            .struct_span_err(generic_args_span, "`Fn` traits cannot take lifetime parameters")
+            .with_multipart_suggestion(
+                "consider using a higher-ranked trait bound instead",
+                vec![(generic_args_span, "".to_owned()), (before_fn_path, snippet)],
+                Applicability::MaybeIncorrect,
+            )
+            .emit();
+        Ok(())
+    }
+
+    pub(super) fn check_lifetime(&mut self) -> bool {
+        self.expected_token_types.insert(TokenType::Lifetime);
+        self.token.is_lifetime()
+    }
+
+    /// Parses a single lifetime `'a` or panics.
+    pub(super) fn expect_lifetime(&mut self) -> Lifetime {
+        if let Some((ident, is_raw)) = self.token.lifetime() {
+            if matches!(is_raw, IdentIsRaw::No)
+                && ident.without_first_quote().is_reserved()
+                && ![kw::UnderscoreLifetime, kw::StaticLifetime].contains(&ident.name)
+            {
+                self.dcx().emit_err(errors::KeywordLifetime { span: ident.span });
+            }
+
+            self.bump();
+            Lifetime { ident, id: ast::DUMMY_NODE_ID }
+        } else {
+            self.dcx().span_bug(self.token.span, "not a lifetime")
+        }
+    }
+
+    pub(super) fn mk_ty(&self, span: Span, kind: TyKind) -> P<Ty> {
+        P(Ty { kind, span, id: ast::DUMMY_NODE_ID, tokens: None })
+    }
+}
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
new file mode 100644
index 00000000000..a7f8d3b9139
--- /dev/null
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -0,0 +1,343 @@
+//! Post-expansion validation of the meta-item syntax of attributes.
+
+use std::slice;
+
+use rustc_ast::token::Delimiter;
+use rustc_ast::tokenstream::DelimSpan;
+use rustc_ast::{
+    self as ast, AttrArgs, Attribute, DelimArgs, MetaItem, MetaItemInner, MetaItemKind, NodeId,
+    Path, Safety,
+};
+use rustc_attr_parsing::{AttributeParser, Late};
+use rustc_errors::{Applicability, DiagCtxtHandle, FatalError, PResult};
+use rustc_feature::{AttributeSafety, AttributeTemplate, BUILTIN_ATTRIBUTE_MAP, BuiltinAttribute};
+use rustc_session::errors::report_lit_error;
+use rustc_session::lint::BuiltinLintDiag;
+use rustc_session::lint::builtin::{ILL_FORMED_ATTRIBUTE_INPUT, UNSAFE_ATTR_OUTSIDE_UNSAFE};
+use rustc_session::parse::ParseSess;
+use rustc_span::{Span, Symbol, sym};
+
+use crate::{errors, parse_in};
+
+pub fn check_attr(psess: &ParseSess, attr: &Attribute, id: NodeId) {
+    if attr.is_doc_comment() || attr.has_name(sym::cfg_trace) || attr.has_name(sym::cfg_attr_trace)
+    {
+        return;
+    }
+
+    let builtin_attr_info = attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name));
+
+    let builtin_attr_safety = builtin_attr_info.map(|x| x.safety);
+    check_attribute_safety(psess, builtin_attr_safety, attr, id);
+
+    // Check input tokens for built-in and key-value attributes.
+    match builtin_attr_info {
+        // `rustc_dummy` doesn't have any restrictions specific to built-in attributes.
+        Some(BuiltinAttribute { name, template, .. }) if *name != sym::rustc_dummy => {
+            match parse_meta(psess, attr) {
+                // Don't check safety again; we just did that.
+                Ok(meta) => {
+                    check_builtin_meta_item(psess, &meta, attr.style, *name, *template, false)
+                }
+                Err(err) => {
+                    err.emit();
+                }
+            }
+        }
+        _ => {
+            let attr_item = attr.get_normal_item();
+            if let AttrArgs::Eq { .. } = attr_item.args {
+                // All key-value attributes are restricted to meta-item syntax.
+                match parse_meta(psess, attr) {
+                    Ok(_) => {}
+                    Err(err) => {
+                        err.emit();
+                    }
+                }
+            }
+        }
+    }
+}
+
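+/// Parses an attribute's arguments back into a `MetaItem`. Illustratively:
+/// `#[foo]` maps to `MetaItemKind::Word`, `#[foo(a, b)]` to `MetaItemKind::List`,
+/// and `#[foo = "bar"]` to `MetaItemKind::NameValue`.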
+pub fn parse_meta<'a>(psess: &'a ParseSess, attr: &Attribute) -> PResult<'a, MetaItem> {
+    let item = attr.get_normal_item();
+    Ok(MetaItem {
+        unsafety: item.unsafety,
+        span: attr.span,
+        path: item.path.clone(),
+        kind: match &item.args {
+            AttrArgs::Empty => MetaItemKind::Word,
+            AttrArgs::Delimited(DelimArgs { dspan, delim, tokens }) => {
+                check_meta_bad_delim(psess, *dspan, *delim);
+                let nmis =
+                    parse_in(psess, tokens.clone(), "meta list", |p| p.parse_meta_seq_top())?;
+                MetaItemKind::List(nmis)
+            }
+            AttrArgs::Eq { expr, .. } => {
+                if let ast::ExprKind::Lit(token_lit) = expr.kind {
+                    let res = ast::MetaItemLit::from_token_lit(token_lit, expr.span);
+                    let res = match res {
+                        Ok(lit) => {
+                            if token_lit.suffix.is_some() {
+                                let mut err = psess.dcx().struct_span_err(
+                                    expr.span,
+                                    "suffixed literals are not allowed in attributes",
+                                );
+                                err.help(
+                                    "instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \
+                                    use an unsuffixed version (`1`, `1.0`, etc.)",
+                                );
+                                return Err(err);
+                            } else {
+                                MetaItemKind::NameValue(lit)
+                            }
+                        }
+                        Err(err) => {
+                            let guar = report_lit_error(psess, err, token_lit, expr.span);
+                            let lit = ast::MetaItemLit {
+                                symbol: token_lit.symbol,
+                                suffix: token_lit.suffix,
+                                kind: ast::LitKind::Err(guar),
+                                span: expr.span,
+                            };
+                            MetaItemKind::NameValue(lit)
+                        }
+                    };
+                    res
+                } else {
+                    // Example cases:
+                    // - `#[foo = 1+1]`: results in `ast::ExprKind::Binary`.
+                    // - `#[foo = include_str!("nonexistent-file.rs")]`:
+                    //   results in `ast::ExprKind::Err`. In that case we delay
+                    //   the error because an earlier error will have already
+                    //   been reported.
+                    let msg = "attribute value must be a literal";
+                    let mut err = psess.dcx().struct_span_err(expr.span, msg);
+                    if let ast::ExprKind::Err(_) = expr.kind {
+                        err.downgrade_to_delayed_bug();
+                    }
+                    return Err(err);
+                }
+            }
+        },
+    })
+}
+
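+/// Rejects non-parenthesis delimiters in meta item lists: for example (illustrative),
+/// `#[repr[C]]` or `#[repr{C}]` gets an error suggesting `#[repr(C)]` instead.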
+fn check_meta_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) {
+    if let Delimiter::Parenthesis = delim {
+        return;
+    }
+    psess.dcx().emit_err(errors::MetaBadDelim {
+        span: span.entire(),
+        sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
+    });
+}
+
+pub(super) fn check_cfg_attr_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) {
+    if let Delimiter::Parenthesis = delim {
+        return;
+    }
+    psess.dcx().emit_err(errors::CfgAttrBadDelim {
+        span: span.entire(),
+        sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
+    });
+}
+
+/// Checks that the given meta-item is compatible with this `AttributeTemplate`.
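+///
+/// For instance (illustrative), with a template that allows a word and a list but no
+/// name-value form, `#[inline]` and `#[inline(always)]` are compatible, while
+/// `#[inline = "always"]` is not.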
+fn is_attr_template_compatible(template: &AttributeTemplate, meta: &ast::MetaItemKind) -> bool {
+    let is_one_allowed_subword = |items: &[MetaItemInner]| match items {
+        [item] => item.is_word() && template.one_of.iter().any(|&word| item.has_name(word)),
+        _ => false,
+    };
+    match meta {
+        MetaItemKind::Word => template.word,
+        MetaItemKind::List(items) => template.list.is_some() || is_one_allowed_subword(items),
+        MetaItemKind::NameValue(lit) if lit.kind.is_str() => template.name_value_str.is_some(),
+        MetaItemKind::NameValue(..) => false,
+    }
+}
+
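+/// Checks an attribute's `unsafe(..)` wrapper (or the lack of one) against what the
+/// attribute expects. A rough sketch of the outcomes, assuming `export_name` is an
+/// unsafe builtin attribute and `inline` a normal one:
+///
+/// ```text
+/// #[unsafe(export_name = "x")]  // ok
+/// #[export_name = "x"]          // error, or only a lint in editions predating the
+///                               // attribute becoming unsafe
+/// #[unsafe(inline)]             // error: `unsafe(..)` is not allowed on this attribute
+/// ```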
+pub fn check_attribute_safety(
+    psess: &ParseSess,
+    builtin_attr_safety: Option<AttributeSafety>,
+    attr: &Attribute,
+    id: NodeId,
+) {
+    let attr_item = attr.get_normal_item();
+    match (builtin_attr_safety, attr_item.unsafety) {
+        // - Unsafe builtin attribute
+        // - User wrote `#[unsafe(..)]`, which is permitted on any edition
+        (Some(AttributeSafety::Unsafe { .. }), Safety::Unsafe(..)) => {
+            // OK
+        }
+
+        // - Unsafe builtin attribute
+        // - User did not write `#[unsafe(..)]`
+        (Some(AttributeSafety::Unsafe { unsafe_since }), Safety::Default) => {
+            let path_span = attr_item.path.span;
+
+            // If the `attr_item`'s span is not from a macro, just suggest wrapping
+            // it in `unsafe(...)`. Otherwise, suggest inserting `unsafe(` right after
+            // the opening square bracket and `)` right before the closing one.
+            let diag_span = attr_item.span();
+
+            // Attributes can be safe in earlier editions, and become unsafe in later ones.
+            //
+            // Use the span of the attribute's name to determine the edition: the span of the
+            // attribute as a whole may be inaccurate if it was emitted by a macro.
+            //
+            // See https://github.com/rust-lang/rust/issues/142182.
+            let emit_error = match unsafe_since {
+                None => true,
+                Some(unsafe_since) => path_span.edition() >= unsafe_since,
+            };
+
+            if emit_error {
+                psess.dcx().emit_err(errors::UnsafeAttrOutsideUnsafe {
+                    span: path_span,
+                    suggestion: errors::UnsafeAttrOutsideUnsafeSuggestion {
+                        left: diag_span.shrink_to_lo(),
+                        right: diag_span.shrink_to_hi(),
+                    },
+                });
+            } else {
+                psess.buffer_lint(
+                    UNSAFE_ATTR_OUTSIDE_UNSAFE,
+                    path_span,
+                    id,
+                    BuiltinLintDiag::UnsafeAttrOutsideUnsafe {
+                        attribute_name_span: path_span,
+                        sugg_spans: (diag_span.shrink_to_lo(), diag_span.shrink_to_hi()),
+                    },
+                );
+            }
+        }
+
+        // - Normal builtin attribute, or any non-builtin attribute
+        // - All non-builtin attributes are currently considered safe; writing `#[unsafe(..)]` is
+        //   not permitted on non-builtin attributes or normal builtin attributes
+        (Some(AttributeSafety::Normal) | None, Safety::Unsafe(unsafe_span)) => {
+            psess.dcx().emit_err(errors::InvalidAttrUnsafe {
+                span: unsafe_span,
+                name: attr_item.path.clone(),
+            });
+        }
+
+        // - Normal builtin attribute
+        // - No explicit `#[unsafe(..)]` written.
+        (Some(AttributeSafety::Normal), Safety::Default) => {
+            // OK
+        }
+
+        // - Non-builtin attribute
+        // - No explicit `#[unsafe(..)]` written.
+        (None, Safety::Default) => {
+            // OK
+        }
+
+        (
+            Some(AttributeSafety::Unsafe { .. } | AttributeSafety::Normal) | None,
+            Safety::Safe(..),
+        ) => {
+            psess.dcx().span_delayed_bug(
+                attr_item.span(),
+                "`check_attribute_safety` does not expect `Safety::Safe` on attributes",
+            );
+        }
+    }
+}
+
+// Called by `check_builtin_meta_item` and code that manually denies
+// `unsafe(...)` in `cfg`
+pub fn deny_builtin_meta_unsafety(diag: DiagCtxtHandle<'_>, unsafety: Safety, name: &Path) {
+    // This only supports denying unsafety right now; making builtin attributes
+    // support unsafety will require us to thread the actual `Attribute` through
+    // for nicer diagnostics.
+    if let Safety::Unsafe(unsafe_span) = unsafety {
+        diag.emit_err(errors::InvalidAttrUnsafe { span: unsafe_span, name: name.clone() });
+    }
+}
+
+pub fn check_builtin_meta_item(
+    psess: &ParseSess,
+    meta: &MetaItem,
+    style: ast::AttrStyle,
+    name: Symbol,
+    template: AttributeTemplate,
+    deny_unsafety: bool,
+) {
+    if !is_attr_template_compatible(&template, &meta.kind) {
+        // Attributes with new parsers are validated locally, so they are excluded here.
+        if AttributeParser::<Late>::is_parsed_attribute(slice::from_ref(&name)) {
+            return;
+        }
+        emit_malformed_attribute(psess, style, meta.span, name, template);
+    }
+
+    if deny_unsafety {
+        deny_builtin_meta_unsafety(psess.dcx(), meta.unsafety, &meta.path);
+    }
+}
+
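+/// Emits the "malformed attribute input" diagnostic, listing the template's accepted
+/// forms. For example (illustrative), a template with a list form `arg1, arg2` and a
+/// string name-value form `reason` produces the suggestions `#[name(arg1, arg2)]` and
+/// `#[name = "reason"]`.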
+fn emit_malformed_attribute(
+    psess: &ParseSess,
+    style: ast::AttrStyle,
+    span: Span,
+    name: Symbol,
+    template: AttributeTemplate,
+) {
+    // Some previously accepted forms were used in practice;
+    // report them as warnings for now.
+    let should_warn = |name| matches!(name, sym::doc | sym::link | sym::test | sym::bench);
+
+    let error_msg = format!("malformed `{name}` attribute input");
+    let mut suggestions = vec![];
+    let inner = if style == ast::AttrStyle::Inner { "!" } else { "" };
+    if template.word {
+        suggestions.push(format!("#{inner}[{name}]"));
+    }
+    if let Some(descr) = template.list {
+        suggestions.push(format!("#{inner}[{name}({descr})]"));
+    }
+    suggestions.extend(template.one_of.iter().map(|&word| format!("#{inner}[{name}({word})]")));
+    if let Some(descr) = template.name_value_str {
+        suggestions.push(format!("#{inner}[{name} = \"{descr}\"]"));
+    }
+    if should_warn(name) {
+        psess.buffer_lint(
+            ILL_FORMED_ATTRIBUTE_INPUT,
+            span,
+            ast::CRATE_NODE_ID,
+            BuiltinLintDiag::IllFormedAttributeInput { suggestions: suggestions.clone() },
+        );
+    } else {
+        suggestions.sort();
+        psess
+            .dcx()
+            .struct_span_err(span, error_msg)
+            .with_span_suggestions(
+                span,
+                if suggestions.len() == 1 {
+                    "must be of the form"
+                } else {
+                    "the following are the possible correct uses"
+                },
+                suggestions,
+                Applicability::HasPlaceholders,
+            )
+            .emit();
+    }
+}
+
+pub fn emit_fatal_malformed_builtin_attribute(
+    psess: &ParseSess,
+    attr: &Attribute,
+    name: Symbol,
+) -> ! {
+    let template = BUILTIN_ATTRIBUTE_MAP.get(&name).expect("builtin attr defined").template;
+    emit_malformed_attribute(psess, attr.style, attr.span, name, template);
+    // This is fatal; otherwise it would likely cause a cascade of other errors
+    // (and an error here is expected to be very rare).
+    FatalError.raise()
+}