Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs                          |  39
-rw-r--r--  compiler/rustc_parse/src/lexer/diagnostics.rs               |   9
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                       | 116
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs                |  34
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs  |  58
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs             |   4
-rw-r--r--  compiler/rustc_parse/src/lib.rs                             |  95
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                     |  10
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs             |   4
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs              | 238
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                     | 350
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs                 |   2
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                     | 117
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                      |  90
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs              |   2
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                      |  71
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs                     |  22
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                     | 156
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                       |  37
-rw-r--r--  compiler/rustc_parse/src/validate_attr.rs                   |  54
20 files changed, 822 insertions, 686 deletions
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 3c3a8d6fbb9..c72b7e2cfa7 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -3,8 +3,8 @@ use std::borrow::Cow;
 use rustc_ast::token::Token;
 use rustc_ast::{Path, Visibility};
 use rustc_errors::{
-    codes::*, AddToDiagnostic, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder,
-    IntoDiagnostic, Level, SubdiagnosticMessageOp,
+    codes::*, AddToDiagnostic, Applicability, Diag, DiagCtxt, EmissionGuarantee, IntoDiagnostic,
+    Level, SubdiagMessageOp,
 };
 use rustc_macros::{Diagnostic, Subdiagnostic};
 use rustc_session::errors::ExprParenthesesNeeded;
@@ -558,14 +558,6 @@ pub(crate) struct CatchAfterTry {
 }
 
 #[derive(Diagnostic)]
-#[diag(parse_gen_fn)]
-#[help]
-pub(crate) struct GenFn {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
 #[diag(parse_comma_after_base_struct)]
 #[note]
 pub(crate) struct CommaAfterBaseStruct {
@@ -1073,12 +1065,12 @@ pub(crate) struct ExpectedIdentifier {
     pub help_cannot_start_number: Option<HelpIdentifierStartsWithNumber>,
 }
 
-impl<'a> IntoDiagnostic<'a> for ExpectedIdentifier {
+impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
     #[track_caller]
-    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a> {
+    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> Diag<'a, G> {
         let token_descr = TokenDescription::from_token(&self.token);
 
-        let mut diag = DiagnosticBuilder::new(
+        let mut diag = Diag::new(
             dcx,
             level,
             match token_descr {
@@ -1133,12 +1125,12 @@ pub(crate) struct ExpectedSemi {
     pub sugg: ExpectedSemiSugg,
 }
 
-impl<'a> IntoDiagnostic<'a> for ExpectedSemi {
+impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedSemi {
     #[track_caller]
-    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a> {
+    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> Diag<'a, G> {
         let token_descr = TokenDescription::from_token(&self.token);
 
-        let mut diag = DiagnosticBuilder::new(
+        let mut diag = Diag::new(
             dcx,
             level,
             match token_descr {
@@ -1475,7 +1467,11 @@ pub(crate) struct FnTraitMissingParen {
 }
 
 impl AddToDiagnostic for FnTraitMissingParen {
-    fn add_to_diagnostic_with<F: SubdiagnosticMessageOp>(self, diag: &mut Diagnostic, _: F) {
+    fn add_to_diagnostic_with<G: EmissionGuarantee, F: SubdiagMessageOp<G>>(
+        self,
+        diag: &mut Diag<'_, G>,
+        _: F,
+    ) {
         diag.span_label(self.span, crate::fluent_generated::parse_fn_trait_missing_paren);
         let applicability = if self.machine_applicable {
             Applicability::MachineApplicable
@@ -2545,7 +2541,7 @@ pub enum HelpUseLatestEdition {
 impl HelpUseLatestEdition {
     pub fn new() -> Self {
         let edition = LATEST_STABLE_EDITION;
-        if std::env::var_os("CARGO").is_some() {
+        if rustc_session::utils::was_invoked_from_cargo() {
             Self::Cargo { edition }
         } else {
             Self::Standalone { edition }
@@ -2971,3 +2967,10 @@ pub(crate) struct ArrayIndexInOffsetOf(#[primary_span] pub Span);
 #[derive(Diagnostic)]
 #[diag(parse_invalid_offset_of)]
 pub(crate) struct InvalidOffsetOf(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_async_impl)]
+pub(crate) struct AsyncImpl {
+    #[primary_span]
+    pub span: Span,
+}
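
[Editor's note] The errors.rs hunks above switch the hand-written `IntoDiagnostic` impls from `DiagnosticBuilder` to `Diag` and make them generic over an `EmissionGuarantee`. A minimal sketch of the resulting shape, using a hypothetical `MyParseError` type and a plain string message instead of a fluent slug (neither appears in this diff):

    use rustc_errors::{Diag, DiagCtxt, EmissionGuarantee, IntoDiagnostic, Level};
    use rustc_span::Span;

    // Hypothetical error type, for illustration only.
    struct MyParseError {
        span: Span,
    }

    impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for MyParseError {
        #[track_caller]
        fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> Diag<'a, G> {
            // `Diag` is generic over the emission guarantee, so one impl serves
            // errors, warnings, and delayed bugs alike.
            let mut diag = Diag::new(dcx, level, "my parse error");
            diag.span(self.span);
            diag
        }
    }
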
diff --git a/compiler/rustc_parse/src/lexer/diagnostics.rs b/compiler/rustc_parse/src/lexer/diagnostics.rs
index b1bd4ac75e5..993ff1b97f5 100644
--- a/compiler/rustc_parse/src/lexer/diagnostics.rs
+++ b/compiler/rustc_parse/src/lexer/diagnostics.rs
@@ -1,6 +1,6 @@
 use super::UnmatchedDelim;
 use rustc_ast::token::Delimiter;
-use rustc_errors::Diagnostic;
+use rustc_errors::Diag;
 use rustc_span::source_map::SourceMap;
 use rustc_span::Span;
 
@@ -30,10 +30,7 @@ pub fn same_indentation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) ->
 
 // When we get a `)` or `]` for `{`, we should emit help message here
 // it's more friendly compared to report `unmatched error` in later phase
-pub fn report_missing_open_delim(
-    err: &mut Diagnostic,
-    unmatched_delims: &[UnmatchedDelim],
-) -> bool {
+pub fn report_missing_open_delim(err: &mut Diag<'_>, unmatched_delims: &[UnmatchedDelim]) -> bool {
     let mut reported_missing_open = false;
     for unmatch_brace in unmatched_delims.iter() {
         if let Some(delim) = unmatch_brace.found_delim
@@ -55,7 +52,7 @@ pub fn report_missing_open_delim(
 }
 
 pub fn report_suspicious_mismatch_block(
-    err: &mut Diagnostic,
+    err: &mut Diag<'_>,
     diag_info: &TokenTreeDiagInfo,
     sm: &SourceMap,
     delim: Delimiter,
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 31552452676..f57945a52df 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -4,19 +4,19 @@ use crate::errors;
 use crate::lexer::unicode_chars::UNICODE_ARRAY;
 use crate::make_unclosed_delims_error;
 use rustc_ast::ast::{self, AttrStyle};
-use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
+use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
-use rustc_errors::{codes::*, Applicability, DiagCtxt, DiagnosticBuilder, StashKey};
+use rustc_errors::{codes::*, Applicability, Diag, DiagCtxt, StashKey};
 use rustc_lexer::unescape::{self, EscapeError, Mode};
 use rustc_lexer::{Base, DocStyle, RawStrError};
 use rustc_lexer::{Cursor, LiteralKind};
 use rustc_session::lint::builtin::{
     RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
 };
-use rustc_session::lint::BuiltinLintDiagnostics;
+use rustc_session::lint::BuiltinLintDiag;
 use rustc_session::parse::ParseSess;
-use rustc_span::symbol::{sym, Symbol};
+use rustc_span::symbol::Symbol;
 use rustc_span::{edition::Edition, BytePos, Pos, Span};
 
 mod diagnostics;
@@ -42,12 +42,12 @@ pub struct UnmatchedDelim {
     pub candidate_span: Option<Span>,
 }
 
-pub(crate) fn parse_token_trees<'sess, 'src>(
-    sess: &'sess ParseSess,
+pub(crate) fn parse_token_trees<'psess, 'src>(
+    psess: &'psess ParseSess,
     mut src: &'src str,
     mut start_pos: BytePos,
     override_span: Option<Span>,
-) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
+) -> Result<TokenStream, Vec<Diag<'psess>>> {
     // Skip `#!`, if present.
     if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
         src = &src[shebang_len..];
@@ -56,7 +56,7 @@ pub(crate) fn parse_token_trees<'sess, 'src>(
 
     let cursor = Cursor::new(src);
     let string_reader = StringReader {
-        sess,
+        psess,
         start_pos,
         pos: start_pos,
         src,
@@ -75,7 +75,7 @@ pub(crate) fn parse_token_trees<'sess, 'src>(
 
             let mut buffer = Vec::with_capacity(1);
             for unmatched in unmatched_delims {
-                if let Some(err) = make_unclosed_delims_error(unmatched, sess) {
+                if let Some(err) = make_unclosed_delims_error(unmatched, psess) {
                     buffer.push(err);
                 }
             }
@@ -90,8 +90,8 @@ pub(crate) fn parse_token_trees<'sess, 'src>(
     }
 }
 
-struct StringReader<'sess, 'src> {
-    sess: &'sess ParseSess,
+struct StringReader<'psess, 'src> {
+    psess: &'psess ParseSess,
     /// Initial position, read-only.
     start_pos: BytePos,
     /// The absolute offset within the source_map of the current character.
@@ -107,9 +107,9 @@ struct StringReader<'sess, 'src> {
     nbsp_is_whitespace: bool,
 }
 
-impl<'sess, 'src> StringReader<'sess, 'src> {
-    pub fn dcx(&self) -> &'sess DiagCtxt {
-        &self.sess.dcx
+impl<'psess, 'src> StringReader<'psess, 'src> {
+    pub fn dcx(&self) -> &'psess DiagCtxt {
+        &self.psess.dcx
     }
 
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
@@ -176,12 +176,12 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                 rustc_lexer::TokenKind::RawIdent => {
                     let sym = nfc_normalize(self.str_from(start + BytePos(2)));
                     let span = self.mk_sp(start, self.pos);
-                    self.sess.symbol_gallery.insert(sym, span);
+                    self.psess.symbol_gallery.insert(sym, span);
                     if !sym.can_be_raw() {
                         self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
                     }
-                    self.sess.raw_identifier_spans.push(span);
-                    token::Ident(sym, true)
+                    self.psess.raw_identifier_spans.push(span);
+                    token::Ident(sym, IdentIsRaw::Yes)
                 }
                 rustc_lexer::TokenKind::UnknownPrefix => {
                     self.report_unknown_prefix(start);
@@ -199,9 +199,9 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                 {
                     let sym = nfc_normalize(self.str_from(start));
                     let span = self.mk_sp(start, self.pos);
-                    self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
+                    self.psess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
                         .push(span);
-                    token::Ident(sym, false)
+                    token::Ident(sym, IdentIsRaw::No)
                 }
                 // split up (raw) c string literals to an ident and a string literal when edition < 2021.
                 rustc_lexer::TokenKind::Literal {
@@ -230,7 +230,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                     let suffix = if suffix_start < self.pos {
                         let string = self.str_from(suffix_start);
                         if string == "_" {
-                            self.sess
+                            self.psess
                                 .dcx
                                 .emit_err(errors::UnderscoreLiteralSuffix { span: self.mk_sp(suffix_start, self.pos) });
                             None
@@ -338,8 +338,8 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
     fn ident(&self, start: BytePos) -> TokenKind {
         let sym = nfc_normalize(self.str_from(start));
         let span = self.mk_sp(start, self.pos);
-        self.sess.symbol_gallery.insert(sym, span);
-        token::Ident(sym, false)
+        self.psess.symbol_gallery.insert(sym, span);
+        token::Ident(sym, IdentIsRaw::No)
     }
 
     /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly
@@ -350,12 +350,12 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
         let content = self.str_from(content_start);
         if contains_text_flow_control_chars(content) {
             let span = self.mk_sp(start, self.pos);
-            self.sess.buffer_lint_with_diagnostic(
+            self.psess.buffer_lint_with_diagnostic(
                 TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
                 span,
                 ast::CRATE_NODE_ID,
                 "unicode codepoint changing visible direction of text present in comment",
-                BuiltinLintDiagnostics::UnicodeTextFlow(span, content.to_string()),
+                BuiltinLintDiag::UnicodeTextFlow(span, content.to_string()),
             );
         }
     }
@@ -478,31 +478,34 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                 }
             }
             rustc_lexer::LiteralKind::Int { base, empty_int } => {
+                let mut kind = token::Integer;
                 if empty_int {
                     let span = self.mk_sp(start, end);
-                    self.dcx().emit_err(errors::NoDigitsLiteral { span });
-                    (token::Integer, sym::integer(0))
-                } else {
-                    if matches!(base, Base::Binary | Base::Octal) {
-                        let base = base as u32;
-                        let s = self.str_from_to(start + BytePos(2), end);
-                        for (idx, c) in s.char_indices() {
-                            let span = self.mk_sp(
-                                start + BytePos::from_usize(2 + idx),
-                                start + BytePos::from_usize(2 + idx + c.len_utf8()),
-                            );
-                            if c != '_' && c.to_digit(base).is_none() {
+                    let guar = self.dcx().emit_err(errors::NoDigitsLiteral { span });
+                    kind = token::Err(guar);
+                } else if matches!(base, Base::Binary | Base::Octal) {
+                    let base = base as u32;
+                    let s = self.str_from_to(start + BytePos(2), end);
+                    for (idx, c) in s.char_indices() {
+                        let span = self.mk_sp(
+                            start + BytePos::from_usize(2 + idx),
+                            start + BytePos::from_usize(2 + idx + c.len_utf8()),
+                        );
+                        if c != '_' && c.to_digit(base).is_none() {
+                            let guar =
                                 self.dcx().emit_err(errors::InvalidDigitLiteral { span, base });
-                            }
+                            kind = token::Err(guar);
                         }
                     }
-                    (token::Integer, self.symbol_from_to(start, end))
                 }
+                (kind, self.symbol_from_to(start, end))
             }
             rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
+                let mut kind = token::Float;
                 if empty_exponent {
                     let span = self.mk_sp(start, self.pos);
-                    self.dcx().emit_err(errors::EmptyExponentFloat { span });
+                    let guar = self.dcx().emit_err(errors::EmptyExponentFloat { span });
+                    kind = token::Err(guar);
                 }
                 let base = match base {
                     Base::Hexadecimal => Some("hexadecimal"),
@@ -512,9 +515,11 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                 };
                 if let Some(base) = base {
                     let span = self.mk_sp(start, end);
-                    self.dcx().emit_err(errors::FloatLiteralUnsupportedBase { span, base });
+                    let guar =
+                        self.dcx().emit_err(errors::FloatLiteralUnsupportedBase { span, base });
+                    kind = token::Err(guar)
                 }
-                (token::Float, self.symbol_from_to(start, end))
+                (kind, self.symbol_from_to(start, end))
             }
         }
     }
@@ -561,7 +566,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
     }
 
     fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! {
-        self.sess
+        self.psess
             .dcx
             .struct_span_fatal(
                 self.mk_sp(start, self.pos),
@@ -675,12 +680,12 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
             self.dcx().emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
         } else {
             // Before Rust 2021, only emit a lint for migration.
-            self.sess.buffer_lint_with_diagnostic(
+            self.psess.buffer_lint_with_diagnostic(
                 RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                 prefix_span,
                 ast::CRATE_NODE_ID,
                 format!("prefix `{prefix}` is unknown"),
-                BuiltinLintDiagnostics::ReservedPrefix(prefix_span),
+                BuiltinLintDiag::ReservedPrefix(prefix_span),
             );
         }
     }
@@ -691,7 +696,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
 
     fn cook_common(
         &self,
-        kind: token::LitKind,
+        mut kind: token::LitKind,
         mode: Mode,
         start: BytePos,
         end: BytePos,
@@ -699,7 +704,6 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
         postfix_len: u32,
         unescape: fn(&str, Mode, &mut dyn FnMut(Range<usize>, Result<(), EscapeError>)),
     ) -> (token::LitKind, Symbol) {
-        let mut has_fatal_err = false;
         let content_start = start + BytePos(prefix_len);
         let content_end = end - BytePos(postfix_len);
         let lit_content = self.str_from_to(content_start, content_end);
@@ -711,10 +715,8 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                 let lo = content_start + BytePos(start);
                 let hi = lo + BytePos(end - start);
                 let span = self.mk_sp(lo, hi);
-                if err.is_fatal() {
-                    has_fatal_err = true;
-                }
-                emit_unescape_error(
+                let is_fatal = err.is_fatal();
+                if let Some(guar) = emit_unescape_error(
                     self.dcx(),
                     lit_content,
                     span_with_quotes,
@@ -722,17 +724,21 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                     mode,
                     range,
                     err,
-                );
+                ) {
+                    assert!(is_fatal);
+                    kind = token::Err(guar);
+                }
             }
         });
 
         // We normally exclude the quotes for the symbol, but for errors we
         // include it because it results in clearer error messages.
-        if !has_fatal_err {
-            (kind, Symbol::intern(lit_content))
+        let sym = if !matches!(kind, token::Err(_)) {
+            Symbol::intern(lit_content)
         } else {
-            (token::Err, self.symbol_from_to(start, end))
-        }
+            self.symbol_from_to(start, end)
+        };
+        (kind, sym)
     }
 
     fn cook_unicode(
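
[Editor's note] The lexer/mod.rs hunks above stop tracking a separate fatal-error flag for malformed literals; the `ErrorGuaranteed` returned by `emit_err` is stored directly in the token as `token::Err(guar)`. A rough sketch of that pattern as a hypothetical free helper inside this crate (not part of the diff):

    use crate::errors;
    use rustc_ast::token;
    use rustc_errors::DiagCtxt;
    use rustc_span::Span;

    // Illustration only: an empty integer literal now yields `token::Err(guar)`
    // rather than `token::Integer` plus a side-channel "fatal error" flag.
    fn cook_empty_int(dcx: &DiagCtxt, span: Span) -> token::LitKind {
        // `emit_err` returns an `ErrorGuaranteed`, proof that a diagnostic was
        // emitted; later phases can rely on it instead of re-reporting.
        let guar = dcx.emit_err(errors::NoDigitsLiteral { span });
        token::Err(guar)
    }
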
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index c9ff2d58e2c..a506f98bf3a 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -8,18 +8,18 @@ use rustc_ast_pretty::pprust::token_to_string;
 use rustc_errors::{Applicability, PErr};
 use rustc_span::symbol::kw;
 
-pub(super) struct TokenTreesReader<'sess, 'src> {
-    string_reader: StringReader<'sess, 'src>,
+pub(super) struct TokenTreesReader<'psess, 'src> {
+    string_reader: StringReader<'psess, 'src>,
     /// The "next" token, which has been obtained from the `StringReader` but
     /// not yet handled by the `TokenTreesReader`.
     token: Token,
     diag_info: TokenTreeDiagInfo,
 }
 
-impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
+impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
     pub(super) fn parse_all_token_trees(
-        string_reader: StringReader<'sess, 'src>,
-    ) -> (TokenStream, Result<(), Vec<PErr<'sess>>>, Vec<UnmatchedDelim>) {
+        string_reader: StringReader<'psess, 'src>,
+    ) -> (TokenStream, Result<(), Vec<PErr<'psess>>>, Vec<UnmatchedDelim>) {
         let mut tt_reader = TokenTreesReader {
             string_reader,
             token: Token::dummy(),
@@ -35,7 +35,7 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
     fn parse_token_trees(
         &mut self,
         is_delimited: bool,
-    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'sess>>>) {
+    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'psess>>>) {
         // Move past the opening delimiter.
         let (_, open_spacing) = self.bump(false);
 
@@ -71,9 +71,9 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
         }
     }
 
-    fn eof_err(&mut self) -> PErr<'sess> {
+    fn eof_err(&mut self) -> PErr<'psess> {
         let msg = "this file contains an unclosed delimiter";
-        let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
+        let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg);
         for &(_, sp) in &self.diag_info.open_braces {
             err.span_label(sp, "unclosed delimiter");
             self.diag_info.unmatched_delims.push(UnmatchedDelim {
@@ -89,7 +89,7 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
             report_suspicious_mismatch_block(
                 &mut err,
                 &self.diag_info,
-                self.string_reader.sess.source_map(),
+                self.string_reader.psess.source_map(),
                 *delim,
             )
         }
@@ -99,7 +99,7 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
     fn parse_token_tree_open_delim(
         &mut self,
         open_delim: Delimiter,
-    ) -> Result<TokenTree, Vec<PErr<'sess>>> {
+    ) -> Result<TokenTree, Vec<PErr<'psess>>> {
         // The span for beginning of the delimited section
         let pre_span = self.token.span;
 
@@ -115,7 +115,7 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
 
         // Expand to cover the entire delimited token tree
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
-        let sm = self.string_reader.sess.source_map();
+        let sm = self.string_reader.psess.source_map();
 
         let close_spacing = match self.token.kind {
             // Correct delimiter.
@@ -232,11 +232,11 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
     fn unclosed_delim_err(
         &mut self,
         tts: TokenStream,
-        mut errs: Vec<PErr<'sess>>,
-    ) -> Vec<PErr<'sess>> {
+        mut errs: Vec<PErr<'psess>>,
+    ) -> Vec<PErr<'psess>> {
         // If there are unclosed delims, see if there are diff markers and if so, point them
         // out instead of complaining about the unclosed delims.
-        let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None);
+        let mut parser = crate::stream_to_parser(self.string_reader.psess, tts, None);
         let mut diff_errs = vec![];
         // Suggest removing a `{` we think appears in an `if`/`while` condition
         // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition, but
@@ -289,17 +289,17 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
         return errs;
     }
 
-    fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'sess> {
+    fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'psess> {
         // An unexpected closing delimiter (i.e., there is no
         // matching opening delimiter).
         let token_str = token_to_string(&self.token);
         let msg = format!("unexpected closing delimiter: `{token_str}`");
-        let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
+        let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg);
 
         report_suspicious_mismatch_block(
             &mut err,
             &self.diag_info,
-            self.string_reader.sess.source_map(),
+            self.string_reader.psess.source_map(),
             delim,
         );
         err.span_label(self.token.span, "unexpected closing delimiter");
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index 3238f8e23bb..3ebad6a9fd7 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -3,7 +3,7 @@
 use std::iter::once;
 use std::ops::Range;
 
-use rustc_errors::{Applicability, DiagCtxt};
+use rustc_errors::{Applicability, DiagCtxt, ErrorGuaranteed};
 use rustc_lexer::unescape::{EscapeError, Mode};
 use rustc_span::{BytePos, Span};
 
@@ -21,7 +21,7 @@ pub(crate) fn emit_unescape_error(
     // range of the error inside `lit`
     range: Range<usize>,
     error: EscapeError,
-) {
+) -> Option<ErrorGuaranteed> {
     debug!(
         "emit_unescape_error: {:?}, {:?}, {:?}, {:?}, {:?}",
         lit, full_lit_span, mode, range, error
@@ -31,12 +31,12 @@ pub(crate) fn emit_unescape_error(
         let span = err_span.with_lo(err_span.hi() - BytePos(c.len_utf8() as u32));
         (c, span)
     };
-    match error {
+    Some(match error {
         EscapeError::LoneSurrogateUnicodeEscape => {
-            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: true });
+            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: true })
         }
         EscapeError::OutOfRangeUnicodeEscape => {
-            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: false });
+            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: false })
         }
         EscapeError::MoreThanOneChar => {
             use unicode_normalization::{char::is_combining_mark, UnicodeNormalization};
@@ -106,7 +106,7 @@ pub(crate) fn emit_unescape_error(
                 span: full_lit_span,
                 note,
                 suggestion: sugg,
-            });
+            })
         }
         EscapeError::EscapeOnlyChar => {
             let (c, char_span) = last_char();
@@ -116,15 +116,15 @@ pub(crate) fn emit_unescape_error(
                 escaped_sugg: c.escape_default().to_string(),
                 escaped_msg: escaped_char(c),
                 byte: mode == Mode::Byte,
-            });
+            })
         }
         EscapeError::BareCarriageReturn => {
             let double_quotes = mode.in_double_quotes();
-            dcx.emit_err(UnescapeError::BareCr { span: err_span, double_quotes });
+            dcx.emit_err(UnescapeError::BareCr { span: err_span, double_quotes })
         }
         EscapeError::BareCarriageReturnInRawString => {
             assert!(mode.in_double_quotes());
-            dcx.emit_err(UnescapeError::BareCrRawString(err_span));
+            dcx.emit_err(UnescapeError::BareCrRawString(err_span))
         }
         EscapeError::InvalidEscape => {
             let (c, span) = last_char();
@@ -161,16 +161,14 @@ pub(crate) fn emit_unescape_error(
                      <https://doc.rust-lang.org/reference/tokens.html#literals>",
                 );
             }
-            diag.emit();
-        }
-        EscapeError::TooShortHexEscape => {
-            dcx.emit_err(UnescapeError::TooShortHexEscape(err_span));
+            diag.emit()
         }
+        EscapeError::TooShortHexEscape => dcx.emit_err(UnescapeError::TooShortHexEscape(err_span)),
         EscapeError::InvalidCharInHexEscape | EscapeError::InvalidCharInUnicodeEscape => {
             let (c, span) = last_char();
             let is_hex = error == EscapeError::InvalidCharInHexEscape;
             let ch = escaped_char(c);
-            dcx.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch });
+            dcx.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch })
         }
         EscapeError::NonAsciiCharInByte => {
             let (c, span) = last_char();
@@ -213,23 +211,23 @@ pub(crate) fn emit_unescape_error(
                     Applicability::MaybeIncorrect,
                 );
             }
-            err.emit();
+            err.emit()
         }
         EscapeError::OutOfRangeHexEscape => {
-            dcx.emit_err(UnescapeError::OutOfRangeHexEscape(err_span));
+            dcx.emit_err(UnescapeError::OutOfRangeHexEscape(err_span))
         }
         EscapeError::LeadingUnderscoreUnicodeEscape => {
             let (c, span) = last_char();
             dcx.emit_err(UnescapeError::LeadingUnderscoreUnicodeEscape {
                 span,
                 ch: escaped_char(c),
-            });
+            })
         }
         EscapeError::OverlongUnicodeEscape => {
-            dcx.emit_err(UnescapeError::OverlongUnicodeEscape(err_span));
+            dcx.emit_err(UnescapeError::OverlongUnicodeEscape(err_span))
         }
         EscapeError::UnclosedUnicodeEscape => {
-            dcx.emit_err(UnescapeError::UnclosedUnicodeEscape(err_span, err_span.shrink_to_hi()));
+            dcx.emit_err(UnescapeError::UnclosedUnicodeEscape(err_span, err_span.shrink_to_hi()))
         }
         EscapeError::NoBraceInUnicodeEscape => {
             let mut suggestion = "\\u{".to_owned();
@@ -248,23 +246,17 @@ pub(crate) fn emit_unescape_error(
             } else {
                 (Some(err_span), NoBraceUnicodeSub::Help)
             };
-            dcx.emit_err(UnescapeError::NoBraceInUnicodeEscape { span: err_span, label, sub });
+            dcx.emit_err(UnescapeError::NoBraceInUnicodeEscape { span: err_span, label, sub })
         }
         EscapeError::UnicodeEscapeInByte => {
-            dcx.emit_err(UnescapeError::UnicodeEscapeInByte(err_span));
+            dcx.emit_err(UnescapeError::UnicodeEscapeInByte(err_span))
         }
         EscapeError::EmptyUnicodeEscape => {
-            dcx.emit_err(UnescapeError::EmptyUnicodeEscape(err_span));
-        }
-        EscapeError::ZeroChars => {
-            dcx.emit_err(UnescapeError::ZeroChars(err_span));
-        }
-        EscapeError::LoneSlash => {
-            dcx.emit_err(UnescapeError::LoneSlash(err_span));
-        }
-        EscapeError::NulInCStr => {
-            dcx.emit_err(UnescapeError::NulInCStr { span: err_span });
+            dcx.emit_err(UnescapeError::EmptyUnicodeEscape(err_span))
         }
+        EscapeError::ZeroChars => dcx.emit_err(UnescapeError::ZeroChars(err_span)),
+        EscapeError::LoneSlash => dcx.emit_err(UnescapeError::LoneSlash(err_span)),
+        EscapeError::NulInCStr => dcx.emit_err(UnescapeError::NulInCStr { span: err_span }),
         EscapeError::UnskippedWhitespaceWarning => {
             let (c, char_span) = last_char();
             dcx.emit_warn(UnescapeError::UnskippedWhitespace {
@@ -272,11 +264,13 @@ pub(crate) fn emit_unescape_error(
                 ch: escaped_char(c),
                 char_span,
             });
+            return None;
         }
         EscapeError::MultipleSkippedLinesWarning => {
             dcx.emit_warn(UnescapeError::MultipleSkippedLinesWarning(err_span));
+            return None;
         }
-    }
+    })
 }
 
 /// Pushes a character to a message string for error reporting
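
[Editor's note] With the change above, `emit_unescape_error` returns `Option<ErrorGuaranteed>`: `Some` for every hard error it emits, `None` for the two warning-only cases. The caller in `cook_common` (shown earlier in this diff) uses that to flip the literal kind; roughly:

    // Caller-side contract, paraphrasing cook_common above:
    if let Some(guar) = emit_unescape_error(
        self.dcx(), lit_content, span_with_quotes, span, mode, range, err,
    ) {
        // A guarantee means a hard error was already emitted for this literal.
        kind = token::Err(guar);
    }
    // `None` covers the warning-only cases (unskipped whitespace, multiple
    // skipped lines), which leave the literal kind untouched.
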
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index a136abaa28b..6b055fc844a 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -307,7 +307,7 @@ pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[
 // fancier error recovery to it, as there will be less overall work to do this way.
 const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     (" ", "Space", None),
-    ("_", "Underscore", Some(token::Ident(kw::Underscore, false))),
+    ("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))),
     ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
     (",", "Comma", Some(token::Comma)),
     (";", "Semicolon", Some(token::Semi)),
@@ -350,7 +350,7 @@ pub(super) fn check_for_substitution(
 
     let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
         let msg = format!("substitution character not found for '{ch}'");
-        reader.sess.dcx.span_bug(span, msg);
+        reader.psess.dcx.span_bug(span, msg);
     };
 
     // special help suggestion for "directed" double quotes
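
[Editor's note] unicode_chars.rs picks up the same `IdentIsRaw` change seen in the lexer: the raw-identifier flag on `token::Ident` is now a dedicated enum rather than a bare bool. For illustration only (not part of this diff):

    use rustc_ast::token::{self, IdentIsRaw};
    use rustc_span::symbol::kw;

    // Call sites now read as self-documenting:
    let underscore = token::Ident(kw::Underscore, IdentIsRaw::No);
    let raw_match = token::Ident(kw::Match, IdentIsRaw::Yes); // e.g. `r#match`
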
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 5bd8bb72bd6..a46372d368f 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -18,7 +18,7 @@ use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::{AttrItem, Attribute, MetaItem};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
-use rustc_errors::{DiagnosticBuilder, FatalError, PResult};
+use rustc_errors::{Diag, FatalError, PResult};
 use rustc_session::parse::ParseSess;
 use rustc_span::{FileName, SourceFile, Span};
 
@@ -41,8 +41,7 @@ rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
 // uses a HOF to parse anything, and <source> includes file and
 // `source_str`.
 
-/// A variant of 'panictry!' that works on a `Vec<DiagnosticBuilder>` instead of a single
-/// `DiagnosticBuilder`.
+/// A variant of 'panictry!' that works on a `Vec<Diag>` instead of a single `Diag`.
 macro_rules! panictry_buffer {
     ($e:expr) => {{
         use std::result::Result::{Err, Ok};
@@ -58,84 +57,84 @@ macro_rules! panictry_buffer {
     }};
 }
 
-pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> {
-    let mut parser = new_parser_from_file(sess, input, None);
+pub fn parse_crate_from_file<'a>(input: &Path, psess: &'a ParseSess) -> PResult<'a, ast::Crate> {
+    let mut parser = new_parser_from_file(psess, input, None);
     parser.parse_crate_mod()
 }
 
 pub fn parse_crate_attrs_from_file<'a>(
     input: &Path,
-    sess: &'a ParseSess,
+    psess: &'a ParseSess,
 ) -> PResult<'a, ast::AttrVec> {
-    let mut parser = new_parser_from_file(sess, input, None);
+    let mut parser = new_parser_from_file(psess, input, None);
     parser.parse_inner_attributes()
 }
 
 pub fn parse_crate_from_source_str(
     name: FileName,
     source: String,
-    sess: &ParseSess,
+    psess: &ParseSess,
 ) -> PResult<'_, ast::Crate> {
-    new_parser_from_source_str(sess, name, source).parse_crate_mod()
+    new_parser_from_source_str(psess, name, source).parse_crate_mod()
 }
 
 pub fn parse_crate_attrs_from_source_str(
     name: FileName,
     source: String,
-    sess: &ParseSess,
+    psess: &ParseSess,
 ) -> PResult<'_, ast::AttrVec> {
-    new_parser_from_source_str(sess, name, source).parse_inner_attributes()
+    new_parser_from_source_str(psess, name, source).parse_inner_attributes()
 }
 
 pub fn parse_stream_from_source_str(
     name: FileName,
     source: String,
-    sess: &ParseSess,
+    psess: &ParseSess,
     override_span: Option<Span>,
 ) -> TokenStream {
-    source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
+    source_file_to_stream(psess, psess.source_map().new_source_file(name, source), override_span)
 }
 
 /// Creates a new parser from a source string.
-pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
-    panictry_buffer!(maybe_new_parser_from_source_str(sess, name, source))
+pub fn new_parser_from_source_str(psess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
+    panictry_buffer!(maybe_new_parser_from_source_str(psess, name, source))
 }
 
 /// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
 /// token stream; these must be consumed via `emit`, `cancel`, etc., otherwise a panic will occur
 /// when they are dropped.
 pub fn maybe_new_parser_from_source_str(
-    sess: &ParseSess,
+    psess: &ParseSess,
     name: FileName,
     source: String,
-) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
-    maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source))
+) -> Result<Parser<'_>, Vec<Diag<'_>>> {
+    maybe_source_file_to_parser(psess, psess.source_map().new_source_file(name, source))
 }
 
 /// Creates a new parser, aborting if the file doesn't exist. If a span is given, that is used on
 /// an error as the source of the problem.
-pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Span>) -> Parser<'a> {
-    let source_file = sess.source_map().load_file(path).unwrap_or_else(|e| {
+pub fn new_parser_from_file<'a>(psess: &'a ParseSess, path: &Path, sp: Option<Span>) -> Parser<'a> {
+    let source_file = psess.source_map().load_file(path).unwrap_or_else(|e| {
         let msg = format!("couldn't read {}: {}", path.display(), e);
-        let mut err = sess.dcx.struct_fatal(msg);
+        let mut err = psess.dcx.struct_fatal(msg);
         if let Some(sp) = sp {
             err.span(sp);
         }
         err.emit();
     });
 
-    panictry_buffer!(maybe_source_file_to_parser(sess, source_file))
+    panictry_buffer!(maybe_source_file_to_parser(psess, source_file))
 }
 
 /// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing
 /// the initial token stream.
 fn maybe_source_file_to_parser(
-    sess: &ParseSess,
+    psess: &ParseSess,
     source_file: Lrc<SourceFile>,
-) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
+) -> Result<Parser<'_>, Vec<Diag<'_>>> {
     let end_pos = source_file.end_position();
-    let stream = maybe_file_to_stream(sess, source_file, None)?;
-    let mut parser = stream_to_parser(sess, stream, None);
+    let stream = maybe_file_to_stream(psess, source_file, None)?;
+    let mut parser = stream_to_parser(psess, stream, None);
     if parser.token == token::Eof {
         parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
     }
@@ -147,47 +146,47 @@ fn maybe_source_file_to_parser(
 
 /// Given a `source_file`, produces a sequence of token trees.
 pub fn source_file_to_stream(
-    sess: &ParseSess,
+    psess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> TokenStream {
-    panictry_buffer!(maybe_file_to_stream(sess, source_file, override_span))
+    panictry_buffer!(maybe_file_to_stream(psess, source_file, override_span))
 }
 
 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
 /// parsing the token stream.
-fn maybe_file_to_stream<'sess>(
-    sess: &'sess ParseSess,
+fn maybe_file_to_stream<'psess>(
+    psess: &'psess ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
+) -> Result<TokenStream, Vec<Diag<'psess>>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
-        sess.dcx.bug(format!(
+        psess.dcx.bug(format!(
             "cannot lex `source_file` without source: {}",
-            sess.source_map().filename_for_diagnostics(&source_file.name)
+            psess.source_map().filename_for_diagnostics(&source_file.name)
         ));
     });
 
-    lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span)
+    lexer::parse_token_trees(psess, src.as_str(), source_file.start_pos, override_span)
 }
 
 /// Given a stream and the `ParseSess`, produces a parser.
 pub fn stream_to_parser<'a>(
-    sess: &'a ParseSess,
+    psess: &'a ParseSess,
     stream: TokenStream,
     subparser_name: Option<&'static str>,
 ) -> Parser<'a> {
-    Parser::new(sess, stream, subparser_name)
+    Parser::new(psess, stream, subparser_name)
 }
 
 /// Runs the given subparser `f` on the tokens of the given `attr`'s item.
 pub fn parse_in<'a, T>(
-    sess: &'a ParseSess,
+    psess: &'a ParseSess,
     tts: TokenStream,
     name: &'static str,
     mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
 ) -> PResult<'a, T> {
-    let mut parser = Parser::new(sess, tts, Some(name));
+    let mut parser = Parser::new(psess, tts, Some(name));
     let result = f(&mut parser)?;
     if parser.token != token::Eof {
         parser.unexpected()?;
@@ -195,28 +194,28 @@ pub fn parse_in<'a, T>(
     Ok(result)
 }
 
-pub fn fake_token_stream_for_item(sess: &ParseSess, item: &ast::Item) -> TokenStream {
+pub fn fake_token_stream_for_item(psess: &ParseSess, item: &ast::Item) -> TokenStream {
     let source = pprust::item_to_string(item);
     let filename = FileName::macro_expansion_source_code(&source);
-    parse_stream_from_source_str(filename, source, sess, Some(item.span))
+    parse_stream_from_source_str(filename, source, psess, Some(item.span))
 }
 
-pub fn fake_token_stream_for_crate(sess: &ParseSess, krate: &ast::Crate) -> TokenStream {
+pub fn fake_token_stream_for_crate(psess: &ParseSess, krate: &ast::Crate) -> TokenStream {
     let source = pprust::crate_to_string_for_macros(krate);
     let filename = FileName::macro_expansion_source_code(&source);
-    parse_stream_from_source_str(filename, source, sess, Some(krate.spans.inner_span))
+    parse_stream_from_source_str(filename, source, psess, Some(krate.spans.inner_span))
 }
 
 pub fn parse_cfg_attr(
     attr: &Attribute,
-    parse_sess: &ParseSess,
+    psess: &ParseSess,
 ) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> {
     match attr.get_normal_item().args {
         ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens })
             if !tokens.is_empty() =>
         {
-            crate::validate_attr::check_cfg_attr_bad_delim(parse_sess, dspan, delim);
-            match parse_in(parse_sess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
+            crate::validate_attr::check_cfg_attr_bad_delim(psess, dspan, delim);
+            match parse_in(psess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
                 Ok(r) => return Some(r),
                 Err(e) => {
                     e.with_help(format!("the valid syntax is `{CFG_ATTR_GRAMMAR_HELP}`"))
@@ -225,7 +224,7 @@ pub fn parse_cfg_attr(
                 }
             }
         }
-        _ => error_malformed_cfg_attr_missing(attr.span, parse_sess),
+        _ => error_malformed_cfg_attr_missing(attr.span, psess),
     }
     None
 }
@@ -235,6 +234,6 @@ const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
     <https://doc.rust-lang.org/reference/conditional-compilation.html\
     #the-cfg_attr-attribute>";
 
-fn error_malformed_cfg_attr_missing(span: Span, parse_sess: &ParseSess) {
-    parse_sess.dcx.emit_err(errors::MalformedCfgAttr { span, sugg: CFG_ATTR_GRAMMAR_HELP });
+fn error_malformed_cfg_attr_missing(span: Span, psess: &ParseSess) {
+    psess.dcx.emit_err(errors::MalformedCfgAttr { span, sugg: CFG_ATTR_GRAMMAR_HELP });
 }
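
[Editor's note] The lib.rs hunks are the `sess` -> `psess` parameter rename on the public parsing entry points; the signatures are otherwise unchanged. A usage sketch against the renamed API (assumes a `ParseSess` named `psess` already exists; not part of this diff):

    use rustc_span::FileName;

    // Parse a crate from an in-memory string via the renamed entry point.
    let source = "fn main() {}".to_owned();
    let mut parser = rustc_parse::new_parser_from_source_str(
        &psess,
        FileName::anon_source_code(&source),
        source.clone(),
    );
    let krate = parser.parse_crate_mod(); // PResult<'_, ast::Crate>
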
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 98e062dd784..eb9a10f4bda 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -8,7 +8,7 @@ use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle
 use rustc_ast as ast;
 use rustc_ast::attr;
 use rustc_ast::token::{self, Delimiter, Nonterminal};
-use rustc_errors::{codes::*, Diagnostic, PResult};
+use rustc_errors::{codes::*, Diag, PResult};
 use rustc_span::{sym, BytePos, Span};
 use thin_vec::ThinVec;
 use tracing::debug;
@@ -85,7 +85,7 @@ impl<'a> Parser<'a> {
                 // Always make an outer attribute - this allows us to recover from a misplaced
                 // inner attribute.
                 Some(attr::mk_doc_comment(
-                    &self.sess.attr_id_generator,
+                    &self.psess.attr_id_generator,
                     comment_kind,
                     ast::AttrStyle::Outer,
                     data,
@@ -135,13 +135,13 @@ impl<'a> Parser<'a> {
                 this.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy);
             }
 
-            Ok(attr::mk_attr_from_item(&self.sess.attr_id_generator, item, None, style, attr_sp))
+            Ok(attr::mk_attr_from_item(&self.psess.attr_id_generator, item, None, style, attr_sp))
         })
     }
 
     fn annotate_following_item_if_applicable(
         &self,
-        err: &mut Diagnostic,
+        err: &mut Diag<'_>,
         span: Span,
         attr_type: OuterAttributeType,
     ) -> Option<Span> {
@@ -288,7 +288,7 @@ impl<'a> Parser<'a> {
                 if attr_style == ast::AttrStyle::Inner {
                     self.bump();
                     Some(attr::mk_doc_comment(
-                        &self.sess.attr_id_generator,
+                        &self.psess.attr_id_generator,
                         comment_kind,
                         attr_style,
                         data,
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 2307f4cfffa..a1dd7d6f673 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -40,8 +40,8 @@ impl AttrWrapper {
         AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
     }
 
-    pub(crate) fn take_for_recovery(self, sess: &ParseSess) -> AttrVec {
-        sess.dcx.span_delayed_bug(
+    pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
+        psess.dcx.span_delayed_bug(
             self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
             "AttrVec is taken for recovery but no error is produced",
         );
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 445d5b2ce79..2f7ac7d3a12 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -21,6 +21,8 @@ use crate::errors::{
 use crate::fluent_generated as fluent;
 use crate::parser;
 use crate::parser::attr::InnerAttrPolicy;
+use ast::token::IdentIsRaw;
+use parser::Recovered;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
@@ -34,8 +36,8 @@ use rustc_ast::{
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::{
-    pluralize, AddToDiagnostic, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder,
-    ErrorGuaranteed, FatalError, PErr, PResult,
+    pluralize, AddToDiagnostic, Applicability, Diag, DiagCtxt, ErrorGuaranteed, FatalError, PErr,
+    PResult,
 };
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::Spanned;
@@ -46,14 +48,14 @@ use std::ops::{Deref, DerefMut};
 use thin_vec::{thin_vec, ThinVec};
 
 /// Creates a placeholder argument.
-pub(super) fn dummy_arg(ident: Ident) -> Param {
+pub(super) fn dummy_arg(ident: Ident, guar: ErrorGuaranteed) -> Param {
     let pat = P(Pat {
         id: ast::DUMMY_NODE_ID,
         kind: PatKind::Ident(BindingAnnotation::NONE, ident, None),
         span: ident.span,
         tokens: None,
     });
-    let ty = Ty { kind: TyKind::Err, span: ident.span, id: ast::DUMMY_NODE_ID, tokens: None };
+    let ty = Ty { kind: TyKind::Err(guar), span: ident.span, id: ast::DUMMY_NODE_ID, tokens: None };
     Param {
         attrs: AttrVec::default(),
         id: ast::DUMMY_NODE_ID,
@@ -208,11 +210,11 @@ struct MultiSugg {
 }
 
 impl MultiSugg {
-    fn emit(self, err: &mut Diagnostic) {
+    fn emit(self, err: &mut Diag<'_>) {
         err.multipart_suggestion(self.msg, self.patches, self.applicability);
     }
 
-    fn emit_verbose(self, err: &mut Diagnostic) {
+    fn emit_verbose(self, err: &mut Diag<'_>) {
         err.multipart_suggestion_verbose(self.msg, self.patches, self.applicability);
     }
 }
@@ -240,7 +242,7 @@ impl<'a> DerefMut for SnapshotParser<'a> {
 
 impl<'a> Parser<'a> {
     pub fn dcx(&self) -> &'a DiagCtxt {
-        &self.sess.dcx
+        &self.psess.dcx
     }
 
     /// Replace `self` with `snapshot.parser`.
@@ -255,7 +257,7 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
-        self.sess.source_map().span_to_snippet(span)
+        self.psess.source_map().span_to_snippet(span)
     }
 
     /// Emits an error with suggestions if an identifier was expected but not found.
@@ -264,7 +266,7 @@ impl<'a> Parser<'a> {
     pub(super) fn expected_ident_found(
         &mut self,
         recover: bool,
-    ) -> PResult<'a, (Ident, /* is_raw */ bool)> {
+    ) -> PResult<'a, (Ident, IdentIsRaw)> {
         if let TokenKind::DocComment(..) = self.prev_token.kind {
             return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
                 span: self.prev_token.span,
@@ -290,11 +292,11 @@ impl<'a> Parser<'a> {
         let bad_token = self.token.clone();
 
         // suggest prepending a keyword in identifier position with `r#`
-        let suggest_raw = if let Some((ident, false)) = self.token.ident()
+        let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
             && ident.is_raw_guess()
             && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
         {
-            recovered_ident = Some((ident, true));
+            recovered_ident = Some((ident, IdentIsRaw::Yes));
 
             // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
             // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
@@ -320,7 +322,7 @@ impl<'a> Parser<'a> {
         let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| {
             let (invalid, valid) = self.token.span.split_at(len as u32);
 
-            recovered_ident = Some((Ident::new(valid_portion, valid), false));
+            recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No));
 
             HelpIdentifierStartsWithNumber { num_span: invalid }
         });
@@ -362,7 +364,7 @@ impl<'a> Parser<'a> {
 
                             if !self.look_ahead(1, |t| *t == token::Lt)
                                 && let Ok(snippet) =
-                                    self.sess.source_map().span_to_snippet(generic.span)
+                                    self.psess.source_map().span_to_snippet(generic.span)
                             {
                                 err.multipart_suggestion_verbose(
                                         format!("place the generic parameter name after the {ident_name} name"),
@@ -399,7 +401,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn expected_ident_found_err(&mut self) -> DiagnosticBuilder<'a> {
+    pub(super) fn expected_ident_found_err(&mut self) -> Diag<'a> {
         self.expected_ident_found(false).unwrap_err()
     }
 
@@ -429,7 +431,7 @@ impl<'a> Parser<'a> {
         &mut self,
         edible: &[TokenKind],
         inedible: &[TokenKind],
-    ) -> PResult<'a, bool /* recovered */> {
+    ) -> PResult<'a, Recovered> {
         debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
@@ -452,7 +454,6 @@ impl<'a> Parser<'a> {
         let mut expected = self
             .expected_tokens
             .iter()
-            .cloned()
             .filter(|token| {
                 // Filter out suggestions that suggest the same token which was found and deemed incorrect.
                 fn is_ident_eq_keyword(found: &TokenKind, expected: &TokenType) -> bool {
@@ -464,7 +465,7 @@ impl<'a> Parser<'a> {
                     false
                 }
 
-                if *token != parser::TokenType::Token(self.token.kind.clone()) {
+                if **token != parser::TokenType::Token(self.token.kind.clone()) {
                     let eq = is_ident_eq_keyword(&self.token.kind, &token);
                     // If the suggestion is a keyword and the found token is an ident,
                     // the content of which are equal to the suggestion's content,
@@ -483,11 +484,12 @@ impl<'a> Parser<'a> {
                 }
                 false
             })
+            .cloned()
             .collect::<Vec<_>>();
         expected.sort_by_cached_key(|x| x.to_string());
         expected.dedup();
 
-        let sm = self.sess.source_map();
+        let sm = self.psess.source_map();
 
         // Special-case "expected `;`" errors.
         if expected.contains(&TokenType::Token(token::Semi)) {
@@ -532,7 +534,7 @@ impl<'a> Parser<'a> {
                     sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
                 });
                 self.bump();
-                return Ok(true);
+                return Ok(Recovered::Yes);
             } else if self.look_ahead(0, |t| {
                 t == &token::CloseDelim(Delimiter::Brace)
                     || ((t.can_begin_expr() || t.can_begin_item())
@@ -556,7 +558,7 @@ impl<'a> Parser<'a> {
                     unexpected_token_label: Some(self.token.span),
                     sugg: ExpectedSemiSugg::AddSemi(span),
                 });
-                return Ok(true);
+                return Ok(Recovered::Yes);
             }
         }
 
@@ -653,9 +655,9 @@ impl<'a> Parser<'a> {
         // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
         // that in the parser requires unbounded lookahead, so we only add a hint to the existing
         // error rather than replacing it entirely.
-        if ((self.prev_token.kind == TokenKind::Ident(sym::c, false)
+        if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No)
             && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
-            || (self.prev_token.kind == TokenKind::Ident(sym::cr, false)
+            || (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No)
                 && matches!(
                     &self.token.kind,
                     TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
@@ -711,7 +713,7 @@ impl<'a> Parser<'a> {
         if self.check_too_many_raw_str_terminators(&mut err) {
             if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
                 err.emit();
-                return Ok(true);
+                return Ok(Recovered::Yes);
             } else {
                 return Err(err);
             }
@@ -742,7 +744,8 @@ impl<'a> Parser<'a> {
         Err(err)
     }
 
-    pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) {
+    /// The user has written `#[attr] expr` which is unsupported. (#106020)
+    pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) -> ErrorGuaranteed {
         // Missing semicolon typo error.
         let span = self.prev_token.span.shrink_to_hi();
         let mut err = self.dcx().create_err(ExpectedSemi {
@@ -785,6 +788,8 @@ impl<'a> Parser<'a> {
                 ],
                 Applicability::MachineApplicable,
             );
+
+            // Special handling for `#[cfg(...)]` chains
             let mut snapshot = self.create_snapshot_for_diagnostic();
             if let [attr] = &expr.attrs[..]
                 && let ast::AttrKind::Normal(attr_kind) = &attr.kind
@@ -795,9 +800,8 @@ impl<'a> Parser<'a> {
                 {
                     Ok(next_attr) => next_attr,
                     Err(inner_err) => {
-                        err.cancel();
                         inner_err.cancel();
-                        return;
+                        return err.emit();
                     }
                 }
                 && let ast::AttrKind::Normal(next_attr_kind) = next_attr.kind
@@ -808,9 +812,8 @@ impl<'a> Parser<'a> {
                 let next_expr = match snapshot.parse_expr() {
                     Ok(next_expr) => next_expr,
                     Err(inner_err) => {
-                        err.cancel();
                         inner_err.cancel();
-                        return;
+                        return err.emit();
                     }
                 };
                 // We have for sure
@@ -819,7 +822,7 @@ impl<'a> Parser<'a> {
                 // #[cfg(..)]
                 // other_expr
                 // So we suggest using `if cfg!(..) { expr } else if cfg!(..) { other_expr }`.
-                let margin = self.sess.source_map().span_to_margin(next_expr.span).unwrap_or(0);
+                let margin = self.psess.source_map().span_to_margin(next_expr.span).unwrap_or(0);
                 let sugg = vec![
                     (attr.span.with_hi(segment.span().hi()), "if cfg!".to_string()),
                     (args_span.shrink_to_hi().with_hi(attr.span.hi()), " {".to_string()),
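
Note: the suggestion built in this hunk rewrites a pair of `#[cfg]`-annotated non-tail expressions into a `cfg!` chain. A hedged before/after sketch; the attribute form is commented out because it is exactly what the parser rejects, and the final `else` arm is only there to keep the sketch compiling:

    fn pick() -> u32 {
        // Rejected form: attributes on non-tail expressions.
        // #[cfg(feature = "fast")]
        // 1
        // #[cfg(not(feature = "fast"))]
        // 2

        // Shape of the suggested rewrite:
        if cfg!(feature = "fast") {
            1
        } else if cfg!(not(feature = "fast")) {
            2
        } else {
            unreachable!()
        }
    }

    fn main() {
        println!("{}", pick());
    }
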
@@ -843,11 +846,11 @@ impl<'a> Parser<'a> {
                 );
             }
         }
-        err.emit();
+        err.emit()
     }
 
-    fn check_too_many_raw_str_terminators(&mut self, err: &mut Diagnostic) -> bool {
-        let sm = self.sess.source_map();
+    fn check_too_many_raw_str_terminators(&mut self, err: &mut Diag<'_>) -> bool {
+        let sm = self.psess.source_map();
         match (&self.prev_token.kind, &self.token.kind) {
             (
                 TokenKind::Literal(Lit {
@@ -919,10 +922,10 @@ impl<'a> Parser<'a> {
                     // fn foo() -> Foo { Path {
                     //     field: value,
                     // } }
-                    err.delay_as_bug();
+                    let guar = err.delay_as_bug();
                     self.restore_snapshot(snapshot);
                     let mut tail = self.mk_block(
-                        thin_vec![self.mk_stmt_err(expr.span)],
+                        thin_vec![self.mk_stmt_err(expr.span, guar)],
                         s,
                         lo.to(self.prev_token.span),
                     );
@@ -932,7 +935,7 @@ impl<'a> Parser<'a> {
                         // expand `before` so that we take care of module path such as:
                         // `foo::Bar { ... } `
                         // we expect to suggest `(foo::Bar { ... })` instead of `foo::(Bar { ... })`
-                        let sm = self.sess.source_map();
+                        let sm = self.psess.source_map();
                         let before = maybe_struct_name.span.shrink_to_lo();
                         if let Ok(extend_before) = sm.span_extend_prev_while(before, |t| {
                             t.is_alphanumeric() || t == ':' || t == '_'
@@ -980,7 +983,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_closure_body(
         &mut self,
-        mut err: DiagnosticBuilder<'a>,
+        mut err: Diag<'a>,
         before: token::Token,
         prev: token::Token,
         token: token::Token,
@@ -988,7 +991,7 @@ impl<'a> Parser<'a> {
         decl_hi: Span,
     ) -> PResult<'a, P<Expr>> {
         err.span_label(lo.to(decl_hi), "while parsing the body of this closure");
-        match before.kind {
+        let guar = match before.kind {
             token::OpenDelim(Delimiter::Brace)
                 if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
             {
@@ -1002,8 +1005,9 @@ impl<'a> Parser<'a> {
                     ],
                     Applicability::MaybeIncorrect,
                 );
-                err.emit();
+                let guar = err.emit();
                 self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+                guar
             }
             token::OpenDelim(Delimiter::Parenthesis)
                 if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
@@ -1020,7 +1024,7 @@ impl<'a> Parser<'a> {
                     ],
                     Applicability::MaybeIncorrect,
                 );
-                err.emit();
+                err.emit()
             }
             _ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => {
                 // We don't have a heuristic to correctly identify where the block
@@ -1033,8 +1037,8 @@ impl<'a> Parser<'a> {
                 return Err(err);
             }
             _ => return Err(err),
-        }
-        Ok(self.mk_expr_err(lo.to(self.token.span)))
+        };
+        Ok(self.mk_expr_err(lo.to(self.token.span), guar))
     }
 
     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
@@ -1210,9 +1214,9 @@ impl<'a> Parser<'a> {
     /// encounter a parse error when encountering the first `,`.
     pub(super) fn check_mistyped_turbofish_with_multiple_type_params(
         &mut self,
-        mut e: DiagnosticBuilder<'a>,
+        mut e: Diag<'a>,
         expr: &mut P<Expr>,
-    ) -> PResult<'a, ()> {
+    ) -> PResult<'a, ErrorGuaranteed> {
         if let ExprKind::Binary(binop, _, _) = &expr.kind
             && let ast::BinOpKind::Lt = binop.node
             && self.eat(&token::Comma)
@@ -1223,7 +1227,7 @@ impl<'a> Parser<'a> {
                 |p| p.parse_generic_arg(None),
             );
             match x {
-                Ok((_, _, false)) => {
+                Ok((_, _, Recovered::No)) => {
                     if self.eat(&token::Gt) {
                         // We made sense of it. Improve the error message.
                         e.span_suggestion_verbose(
@@ -1237,9 +1241,9 @@ impl<'a> Parser<'a> {
                                 // The subsequent expression is valid. Mark
                                 // `expr` as erroneous and emit `e` now, but
                                 // return `Ok` so parsing can continue.
-                                e.emit();
-                                *expr = self.mk_expr_err(expr.span.to(self.prev_token.span));
-                                return Ok(());
+                                let guar = e.emit();
+                                *expr = self.mk_expr_err(expr.span.to(self.prev_token.span), guar);
+                                return Ok(guar);
                             }
                             Err(err) => {
                                 err.cancel();
@@ -1247,7 +1251,7 @@ impl<'a> Parser<'a> {
                         }
                     }
                 }
-                Ok((_, _, true)) => {}
+                Ok((_, _, Recovered::Yes)) => {}
                 Err(err) => {
                     err.cancel();
                 }
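
Note: `check_mistyped_turbofish_with_multiple_type_params` (the hunks above) handles generic arguments written without the leading `::`, which the grammar can only read as chained comparisons. A hedged illustration of the user-facing shape, with the rejected line kept as a comment:

    use std::collections::HashMap;

    fn main() {
        // Rejected: read as `HashMap < String , u32 > ...`, i.e. comparisons.
        // let m = HashMap<String, u32>::new();

        // The turbofish form the suggestion points to:
        let m = HashMap::<String, u32>::new();
        println!("{}", m.len());
    }
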
@@ -1258,7 +1262,7 @@ impl<'a> Parser<'a> {
 
     /// Suggests adding the missing `let` before the identifier in a stmt:
     /// `a: Ty = 1` -> `let a: Ty = 1`
-    pub(super) fn suggest_add_missing_let_for_stmt(&mut self, err: &mut DiagnosticBuilder<'a>) {
+    pub(super) fn suggest_add_missing_let_for_stmt(&mut self, err: &mut Diag<'a>) {
         if self.token == token::Colon {
             let prev_span = self.prev_token.span.shrink_to_lo();
             let snapshot = self.create_snapshot_for_diagnostic();
@@ -1286,7 +1290,7 @@ impl<'a> Parser<'a> {
         err: &mut ComparisonOperatorsCannotBeChained,
         inner_op: &Expr,
         outer_op: &Spanned<AssocOp>,
-    ) -> bool /* advanced the cursor */ {
+    ) -> Recovered {
         if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
             if let ExprKind::Field(_, ident) = l1.kind
                 && ident.as_str().parse::<i32>().is_err()
@@ -1294,7 +1298,7 @@ impl<'a> Parser<'a> {
             {
                 // The parser has encountered `foo.bar<baz`; the likelihood of the turbofish
                 // suggestion being the only one to apply is high.
-                return false;
+                return Recovered::No;
             }
             return match (op.node, &outer_op.node) {
                 // `x == y == z`
@@ -1313,7 +1317,7 @@ impl<'a> Parser<'a> {
                         span: inner_op.span.shrink_to_hi(),
                         middle_term: expr_to_str(r1),
                     });
-                    false // Keep the current parse behavior, where the AST is `(x < y) < z`.
+                    Recovered::No // Keep the current parse behavior, where the AST is `(x < y) < z`.
                 }
                 // `x == y < z`
                 (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
@@ -1327,12 +1331,12 @@ impl<'a> Parser<'a> {
                                 left: r1.span.shrink_to_lo(),
                                 right: r2.span.shrink_to_hi(),
                             });
-                            true
+                            Recovered::Yes
                         }
                         Err(expr_err) => {
                             expr_err.cancel();
                             self.restore_snapshot(snapshot);
-                            false
+                            Recovered::Yes
                         }
                     }
                 }
@@ -1347,19 +1351,19 @@ impl<'a> Parser<'a> {
                                 left: l1.span.shrink_to_lo(),
                                 right: r1.span.shrink_to_hi(),
                             });
-                            true
+                            Recovered::Yes
                         }
                         Err(expr_err) => {
                             expr_err.cancel();
                             self.restore_snapshot(snapshot);
-                            false
+                            Recovered::No
                         }
                     }
                 }
-                _ => false,
+                _ => Recovered::No,
             };
         }
-        false
+        Recovered::No
     }
 
     /// Produces an error if comparison operators are chained (RFC #558).
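
Note: `attempt_chained_comparison_suggestion` above now reports its outcome as `Recovered` as well. The user-facing case it handles, sketched with the rejected chain as a comment and the split that the suggestion points to:

    fn in_range(a: i32, b: i32, c: i32) -> bool {
        // a < b < c        // rejected: comparison operators cannot be chained
        a < b && b < c      // the suggested split
    }

    fn main() {
        assert!(in_range(1, 2, 3));
    }
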
@@ -1391,7 +1395,8 @@ impl<'a> Parser<'a> {
             outer_op.node,
         );
 
-        let mk_err_expr = |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err)));
+        let mk_err_expr =
+            |this: &Self, span, guar| Ok(Some(this.mk_expr(span, ExprKind::Err(guar))));
 
         match &inner_op.kind {
             ExprKind::Binary(op, l1, r1) if op.node.is_comparison() => {
@@ -1441,11 +1446,11 @@ impl<'a> Parser<'a> {
                         match self.parse_expr() {
                             Ok(_) => {
                                 // 99% certain that the suggestion is correct, continue parsing.
-                                self.dcx().emit_err(err);
+                                let guar = self.dcx().emit_err(err);
                                 // FIXME: actually check that the two expressions in the binop are
                                 // paths and resynthesize new fn call expression instead of using
                                 // `ExprKind::Err` placeholder.
-                                mk_err_expr(self, inner_op.span.to(self.prev_token.span))
+                                mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
                             }
                             Err(expr_err) => {
                                 expr_err.cancel();
@@ -1469,11 +1474,11 @@ impl<'a> Parser<'a> {
                         match self.consume_fn_args() {
                             Err(()) => Err(self.dcx().create_err(err)),
                             Ok(()) => {
-                                self.dcx().emit_err(err);
+                                let guar = self.dcx().emit_err(err);
                                 // FIXME: actually check that the two expressions in the binop are
                                 // paths and resynthesize new fn call expression instead of using
                                 // `ExprKind::Err` placeholder.
-                                mk_err_expr(self, inner_op.span.to(self.prev_token.span))
+                                mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
                             }
                         }
                     } else {
@@ -1487,10 +1492,11 @@ impl<'a> Parser<'a> {
 
                         // If it looks like a genuine attempt to chain operators (as opposed to a
                         // misformatted turbofish, for instance), suggest a correct form.
-                        if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
-                        {
-                            self.dcx().emit_err(err);
-                            mk_err_expr(self, inner_op.span.to(self.prev_token.span))
+                        let recovered = self
+                            .attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
+                        if matches!(recovered, Recovered::Yes) {
+                            let guar = self.dcx().emit_err(err);
+                            mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
                         } else {
                             // These cases cause too many knock-down errors, bail out (#61329).
                             Err(self.dcx().create_err(err))
@@ -1499,9 +1505,9 @@ impl<'a> Parser<'a> {
                 }
                 let recover =
                     self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
-                self.dcx().emit_err(err);
-                if recover {
-                    return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
+                let guar = self.dcx().emit_err(err);
+                if matches!(recover, Recovered::Yes) {
+                    return mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar);
                 }
             }
             _ => {}
@@ -1540,14 +1546,14 @@ impl<'a> Parser<'a> {
     pub(super) fn maybe_recover_from_question_mark(&mut self, ty: P<Ty>) -> P<Ty> {
         if self.token == token::Question {
             self.bump();
-            self.dcx().emit_err(QuestionMarkInType {
+            let guar = self.dcx().emit_err(QuestionMarkInType {
                 span: self.prev_token.span,
                 sugg: QuestionMarkInTypeSugg {
                     left: ty.span.shrink_to_lo(),
                     right: self.prev_token.span,
                 },
             });
-            self.mk_ty(ty.span.to(self.prev_token.span), TyKind::Err)
+            self.mk_ty(ty.span.to(self.prev_token.span), TyKind::Err(guar))
         } else {
             ty
         }
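
Note: `maybe_recover_from_question_mark` now stores the `ErrorGuaranteed` from the `QuestionMarkInType` error in the resulting `TyKind::Err`. The input it recovers from, sketched below; the `i32?` line is the rejected form, and as far as I recall the suggestion is to spell out `Option`:

    // fn lookup(miss: bool) -> i32? { ... }   // rejected: `?` is not valid in a type
    fn lookup(miss: bool) -> Option<i32> {     // what the suggestion points toward
        if miss { None } else { Some(42) }
    }

    fn main() {
        assert_eq!(lookup(false), Some(42));
    }
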
@@ -1677,7 +1683,7 @@ impl<'a> Parser<'a> {
         );
         err.span_label(op_span, format!("not a valid {} operator", kind.fixity));
 
-        let help_base_case = |mut err: DiagnosticBuilder<'_, _>, base| {
+        let help_base_case = |mut err: Diag<'_, _>, base| {
             err.help(format!("use `{}= 1` instead", kind.op.chr()));
             err.emit();
             Ok(base)
@@ -1838,12 +1844,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
+    /// Creates a `Diag` for an unexpected token `t` and tries to recover if it is a
     /// closing delimiter.
-    pub(super) fn unexpected_try_recover(
-        &mut self,
-        t: &TokenKind,
-    ) -> PResult<'a, bool /* recovered */> {
+    pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
         let token_str = pprust::token_kind_to_string(t);
         let this_token_str = super::token_descr(&self.token);
         let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
@@ -1869,7 +1872,7 @@ impl<'a> Parser<'a> {
         );
         let mut err = self.dcx().struct_span_err(sp, msg);
         let label_exp = format!("expected `{token_str}`");
-        let sm = self.sess.source_map();
+        let sm = self.psess.source_map();
         if !sm.is_multiline(prev_sp.until(sp)) {
             // When the spans are in the same line, it means that the only content
             // between them is whitespace, point only at the found token.
@@ -1890,7 +1893,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_colon_as_semi(&mut self) -> bool {
         let line_idx = |span: Span| {
-            self.sess
+            self.psess
                 .source_map()
                 .span_to_lines(span)
                 .ok()
@@ -1903,7 +1906,7 @@ impl<'a> Parser<'a> {
         {
             self.dcx().emit_err(ColonAsSemi {
                 span: self.token.span,
-                type_ascription: self.sess.unstable_features.is_nightly_build().then_some(()),
+                type_ascription: self.psess.unstable_features.is_nightly_build().then_some(()),
             });
             self.bump();
             return true;
@@ -1925,8 +1928,8 @@ impl<'a> Parser<'a> {
         } else {
             self.recover_await_prefix(await_sp)?
         };
-        let sp = self.error_on_incorrect_await(lo, hi, &expr, is_question);
-        let expr = self.mk_expr(lo.to(sp), ExprKind::Err);
+        let (sp, guar) = self.error_on_incorrect_await(lo, hi, &expr, is_question);
+        let expr = self.mk_expr_err(lo.to(sp), guar);
         self.maybe_recover_from_bad_qpath(expr)
     }
 
@@ -1955,21 +1958,27 @@ impl<'a> Parser<'a> {
         Ok((expr.span, expr, is_question))
     }
 
-    fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
+    fn error_on_incorrect_await(
+        &self,
+        lo: Span,
+        hi: Span,
+        expr: &Expr,
+        is_question: bool,
+    ) -> (Span, ErrorGuaranteed) {
         let span = lo.to(hi);
         let applicability = match expr.kind {
             ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
             _ => Applicability::MachineApplicable,
         };
 
-        self.dcx().emit_err(IncorrectAwait {
+        let guar = self.dcx().emit_err(IncorrectAwait {
             span,
             sugg_span: (span, applicability),
             expr: self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(expr)),
             question_mark: if is_question { "?" } else { "" },
         });
 
-        span
+        (span, guar)
     }
 
     /// If encountering `future.await()`, consumes and emits an error.
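
Note: `error_on_incorrect_await` now also returns the guarantee from the `IncorrectAwait` error so the recovered expression can carry it. The syntax it recovers from, sketched with prefix `await` as the rejected form:

    async fn fetch() -> u32 {
        1
    }

    async fn run() -> u32 {
        // await fetch()    // rejected: incorrect use of `await`
        fetch().await       // the postfix form the suggestion rewrites to
    }

    fn main() {
        // Constructing the future is enough for this sketch to compile.
        let _ = run();
    }
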
@@ -2013,8 +2022,8 @@ impl<'a> Parser<'a> {
                 );
             }
             err.span_suggestion(lo.shrink_to_lo(), format!("{prefix}you can still access the deprecated `try!()` macro using the \"raw identifier\" syntax"), "r#", Applicability::MachineApplicable);
-            err.emit();
-            Ok(self.mk_expr_err(lo.to(hi)))
+            let guar = err.emit();
+            Ok(self.mk_expr_err(lo.to(hi), guar))
         } else {
             Err(self.expected_expression_found()) // The user isn't trying to invoke the try! macro
         }
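
Note: the hunk above covers recovery for the pre-2018 `try!` macro, whose name is a keyword on later editions, so it can only be invoked via the raw-identifier spelling. A sketch; the `r#try!` line is left commented because the macro is deprecated, and `?` is the modern equivalent:

    fn parse(s: &str) -> Result<i32, std::num::ParseIntError> {
        // try!(s.parse::<i32>())           // rejected on edition 2018+: `try` is reserved
        // Ok(r#try!(s.parse::<i32>()))     // the raw-identifier spelling the hint suggests
        Ok(s.parse::<i32>()?)               // what you would actually write today
    }

    fn main() {
        assert_eq!(parse("7"), Ok(7));
    }
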
@@ -2059,10 +2068,10 @@ impl<'a> Parser<'a> {
         lo: Span,
         err: PErr<'a>,
     ) -> P<Expr> {
-        err.emit();
+        let guar = err.emit();
         // Recover from parse error, callers expect the closing delim to be consumed.
         self.consume_block(delim, ConsumeClosingDelim::Yes);
-        self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err)
+        self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err(guar))
     }
 
     /// Eats tokens until we can be relatively sure we reached the end of the
@@ -2179,7 +2188,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn parameter_without_type(
         &mut self,
-        err: &mut Diagnostic,
+        err: &mut Diag<'_>,
         pat: P<ast::Pat>,
         require_name: bool,
         first_param: bool,
@@ -2304,8 +2313,8 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
         let span = param.pat.span;
-        param.ty.kind = TyKind::Err;
-        self.dcx().emit_err(SelfParamNotFirst { span });
+        let guar = self.dcx().emit_err(SelfParamNotFirst { span });
+        param.ty.kind = TyKind::Err(guar);
         Ok(param)
     }
 
@@ -2336,7 +2345,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
+    pub(super) fn expected_expression_found(&self) -> Diag<'a> {
         let (span, msg) = match (&self.token.kind, self.subparser_name) {
             (&token::Eof, Some(origin)) => {
                 let sp = self.prev_token.span.shrink_to_hi();
@@ -2348,9 +2357,9 @@ impl<'a> Parser<'a> {
             ),
         };
         let mut err = self.dcx().struct_span_err(span, msg);
-        let sp = self.sess.source_map().start_point(self.token.span);
-        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
+        let sp = self.psess.source_map().start_point(self.token.span);
+        if let Some(sp) = self.psess.ambiguous_block_expr_parse.borrow().get(&sp) {
+            err.subdiagnostic(self.dcx(), ExprParenthesesNeeded::surrounding(*sp));
         }
         err.span_label(span, "expected expression");
 
@@ -2437,7 +2446,7 @@ impl<'a> Parser<'a> {
     pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut ThinVec<Param>) {
         let mut seen_inputs = FxHashSet::default();
         for input in fn_inputs.iter_mut() {
-            let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
+            let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err(_)) =
                 (&input.pat.kind, &input.ty.kind)
             {
                 Some(*ident)
@@ -2530,7 +2539,7 @@ impl<'a> Parser<'a> {
         };
 
         let ident = param.ident.to_string();
-        let sugg = match (ty_generics, self.sess.source_map().span_to_snippet(param.span())) {
+        let sugg = match (ty_generics, self.psess.source_map().span_to_snippet(param.span())) {
             (Some(Generics { params, span: impl_generics, .. }), Ok(snippet)) => {
                 Some(match &params[..] {
                     [] => UnexpectedConstParamDeclarationSugg::AddParam {
@@ -2549,9 +2558,10 @@ impl<'a> Parser<'a> {
             }
             _ => None,
         };
-        self.dcx().emit_err(UnexpectedConstParamDeclaration { span: param.span(), sugg });
+        let guar =
+            self.dcx().emit_err(UnexpectedConstParamDeclaration { span: param.span(), sugg });
 
-        let value = self.mk_expr_err(param.span());
+        let value = self.mk_expr_err(param.span(), guar);
         Some(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }))
     }
 
@@ -2585,11 +2595,7 @@ impl<'a> Parser<'a> {
     /// When encountering code like `foo::< bar + 3 >` or `foo::< bar - baz >` we suggest
     /// `foo::<{ bar + 3 }>` and `foo::<{ bar - baz }>`, respectively. We only provide a suggestion
     /// if we think that the resulting expression would be well formed.
-    pub fn recover_const_arg(
-        &mut self,
-        start: Span,
-        mut err: DiagnosticBuilder<'a>,
-    ) -> PResult<'a, GenericArg> {
+    pub fn recover_const_arg(&mut self, start: Span, mut err: Diag<'a>) -> PResult<'a, GenericArg> {
         let is_op_or_dot = AssocOp::from_token(&self.token)
             .and_then(|op| {
                 if let AssocOp::Greater
@@ -2630,8 +2636,8 @@ impl<'a> Parser<'a> {
                         "=",
                         Applicability::MaybeIncorrect,
                     );
-                    let value = self.mk_expr_err(start.to(expr.span));
-                    err.emit();
+                    let guar = err.emit();
+                    let value = self.mk_expr_err(start.to(expr.span), guar);
                     return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }));
                 } else if token::Colon == snapshot.token.kind
                     && expr.span.lo() == snapshot.token.span.hi()
@@ -2644,8 +2650,10 @@ impl<'a> Parser<'a> {
                         "::",
                         Applicability::MaybeIncorrect,
                     );
-                    err.emit();
-                    return Ok(GenericArg::Type(self.mk_ty(start.to(expr.span), TyKind::Err)));
+                    let guar = err.emit();
+                    return Ok(GenericArg::Type(
+                        self.mk_ty(start.to(expr.span), TyKind::Err(guar)),
+                    ));
                 } else if token::Comma == self.token.kind || self.token.kind.should_end_const_arg()
                 {
                     // Avoid the following output by checking that we consumed a full const arg:
@@ -2688,19 +2696,15 @@ impl<'a> Parser<'a> {
     }
 
     /// Creates a dummy const argument, and reports that the expression must be enclosed in braces
-    pub fn dummy_const_arg_needs_braces(
-        &self,
-        mut err: DiagnosticBuilder<'a>,
-        span: Span,
-    ) -> GenericArg {
+    pub fn dummy_const_arg_needs_braces(&self, mut err: Diag<'a>, span: Span) -> GenericArg {
         err.multipart_suggestion(
             "expressions must be enclosed in braces to be used as const generic \
              arguments",
             vec![(span.shrink_to_lo(), "{ ".to_string()), (span.shrink_to_hi(), " }".to_string())],
             Applicability::MaybeIncorrect,
         );
-        let value = self.mk_expr_err(span);
-        err.emit();
+        let guar = err.emit();
+        let value = self.mk_expr_err(span, guar);
         GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value })
     }
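
Note: stepping back from the individual hunks, the recurring change in this file is that `emit()`/`emit_err()` now hand back an `ErrorGuaranteed`, and the `*_err` AST constructors demand one. A standalone sketch of that pattern with toy types (none of these are rustc's real definitions):

    // Toy types only; the real ones live in rustc_errors / rustc_ast.
    #[allow(dead_code)]
    #[derive(Clone, Copy, Debug)]
    struct ErrorGuaranteed(());

    struct DiagCtxt;

    impl DiagCtxt {
        fn emit_err(&self, msg: &str) -> ErrorGuaranteed {
            eprintln!("error: {msg}");
            ErrorGuaranteed(())
        }
    }

    #[allow(dead_code)]
    enum ExprKind {
        Lit(i64),
        // An error node can only exist if an error was actually emitted.
        Err(ErrorGuaranteed),
    }

    fn recover(dcx: &DiagCtxt) -> ExprKind {
        let guar = dcx.emit_err("expected expression");
        ExprKind::Err(guar)
    }

    fn main() {
        let dcx = DiagCtxt;
        if let ExprKind::Err(guar) = recover(&dcx) {
            println!("recovered with proof of emission: {guar:?}");
        }
    }
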
 
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 1a57474bac2..6cc358db9fc 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -3,13 +3,14 @@ use super::diagnostics::SnapshotParser;
 use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
-    AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
-    SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
+    AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions,
+    SemiColonMode, SeqSep, TokenExpectType, TokenType, Trailing, TrailingToken,
 };
 
 use crate::errors;
 use crate::maybe_recover_from_interpolated_ty_qpath;
 use ast::mut_visit::{noop_visit_expr, MutVisitor};
+use ast::token::IdentIsRaw;
 use ast::{CoroutineKind, ForLoopKind, GenBlockKind, Pat, Path, PathSegment};
 use core::mem;
 use rustc_ast::ptr::P;
@@ -25,17 +26,15 @@ use rustc_ast::{Arm, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLim
 use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::stack::ensure_sufficient_stack;
-use rustc_errors::{
-    AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, PResult, StashKey,
-};
+use rustc_errors::{AddToDiagnostic, Applicability, Diag, PResult, StashKey};
 use rustc_lexer::unescape::unescape_char;
 use rustc_macros::Subdiagnostic;
 use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
 use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
-use rustc_session::lint::BuiltinLintDiagnostics;
+use rustc_session::lint::BuiltinLintDiag;
 use rustc_span::source_map::{self, Spanned};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::{BytePos, Pos, Span};
+use rustc_span::{BytePos, ErrorGuaranteed, Pos, Span};
 use thin_vec::{thin_vec, ThinVec};
 
 /// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
@@ -128,13 +127,13 @@ impl<'a> Parser<'a> {
         match self.parse_expr_res(restrictions, None) {
             Ok(expr) => Ok(expr),
             Err(err) => match self.token.ident() {
-                Some((Ident { name: kw::Underscore, .. }, false))
+                Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
                     if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
                 {
                     // Special-case handling of `foo(_, _, _)`
-                    err.emit();
+                    let guar = err.emit();
                     self.bump();
-                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err))
+                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err(guar)))
                 }
                 _ => Err(err),
             },
@@ -404,8 +403,8 @@ impl<'a> Parser<'a> {
                 // suggestions based on the assumption that double-refs are rarely intentional,
                 // and closures are distinct enough that they don't get mixed up with their
                 // return value.
-                let sp = self.sess.source_map().start_point(self.token.span);
-                self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
+                let sp = self.psess.source_map().start_point(self.token.span);
+                self.psess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
                 false
             }
             (true, Some(op)) if !op.can_continue_expr_unambiguously() => false,
@@ -431,7 +430,7 @@ impl<'a> Parser<'a> {
     /// The method does not advance the current token.
     ///
     /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively.
-    fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
+    pub fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
         let (op, span) = match (AssocOp::from_token(&self.token), self.token.ident()) {
             // When parsing const expressions, stop parsing when encountering `>`.
             (
@@ -459,7 +458,9 @@ impl<'a> Parser<'a> {
                 return None;
             }
             (Some(op), _) => (op, self.token.span),
-            (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
+            (None, Some((Ident { name: sym::and, span }, IdentIsRaw::No)))
+                if self.may_recover() =>
+            {
                 self.dcx().emit_err(errors::InvalidLogicalOperator {
                     span: self.token.span,
                     incorrect: "and".into(),
@@ -467,7 +468,7 @@ impl<'a> Parser<'a> {
                 });
                 (AssocOp::LAnd, span)
             }
-            (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
+            (None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => {
                 self.dcx().emit_err(errors::InvalidLogicalOperator {
                     span: self.token.span,
                     incorrect: "or".into(),
@@ -607,7 +608,7 @@ impl<'a> Parser<'a> {
                 };
 
                 // a block on the LHS might have been intended to be an expression instead
-                if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
+                if let Some(sp) = this.psess.ambiguous_block_expr_parse.borrow().get(&lo) {
                     err.add_parentheses = Some(ExprParenthesesNeeded::surrounding(*sp));
                 } else {
                     err.remove_plus = Some(lo);
@@ -665,9 +666,9 @@ impl<'a> Parser<'a> {
     fn parse_expr_box(&mut self, box_kw: Span) -> PResult<'a, (Span, ExprKind)> {
         let (span, _) = self.parse_expr_prefix_common(box_kw)?;
         let inner_span = span.with_lo(box_kw.hi());
-        let code = self.sess.source_map().span_to_snippet(inner_span).unwrap();
-        self.dcx().emit_err(errors::BoxSyntaxRemoved { span: span, code: code.trim() });
-        Ok((span, ExprKind::Err))
+        let code = self.psess.source_map().span_to_snippet(inner_span).unwrap();
+        let guar = self.dcx().emit_err(errors::BoxSyntaxRemoved { span: span, code: code.trim() });
+        Ok((span, ExprKind::Err(guar)))
     }
 
     fn is_mistaken_not_ident_negation(&self) -> bool {
@@ -699,7 +700,7 @@ impl<'a> Parser<'a> {
             // Span the `not` plus trailing whitespace to avoid
             // trailing whitespace after the `!` in our suggestion
             sub: sub_diag(
-                self.sess.source_map().span_until_non_whitespace(lo.to(negated_token.span)),
+                self.psess.source_map().span_until_non_whitespace(lo.to(negated_token.span)),
             ),
         });
 
@@ -744,7 +745,7 @@ impl<'a> Parser<'a> {
                     (
                         // `foo: `
                         ExprKind::Path(None, ast::Path { segments, .. }),
-                        token::Ident(kw::For | kw::Loop | kw::While, false),
+                        token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No),
                     ) if segments.len() == 1 => {
                         let snapshot = self.create_snapshot_for_diagnostic();
                         let label = Label {
@@ -859,13 +860,13 @@ impl<'a> Parser<'a> {
                     ExprKind::MethodCall(_) => "a method call",
                     ExprKind::Call(_, _) => "a function call",
                     ExprKind::Await(_, _) => "`.await`",
-                    ExprKind::Err => return Ok(with_postfix),
+                    ExprKind::Err(_) => return Ok(with_postfix),
                     _ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"),
                 }
             );
             let mut err = self.dcx().struct_span_err(span, msg);
 
-            let suggest_parens = |err: &mut Diagnostic| {
+            let suggest_parens = |err: &mut Diag<'_>| {
                 let suggestions = vec![
                     (span.shrink_to_lo(), "(".to_string()),
                     (span.shrink_to_hi(), ")".to_string()),
@@ -914,7 +915,7 @@ impl<'a> Parser<'a> {
             let found_raw = self.eat_keyword(kw::Raw);
             assert!(found_raw);
             let mutability = self.parse_const_or_mut().unwrap();
-            self.sess.gated_spans.gate(sym::raw_ref_op, lo.to(self.prev_token.span));
+            self.psess.gated_spans.gate(sym::raw_ref_op, lo.to(self.prev_token.span));
             (ast::BorrowKind::Raw, mutability)
         } else {
             // `mut?`
@@ -957,19 +958,20 @@ impl<'a> Parser<'a> {
 
     fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
         loop {
-            let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
-                // we are using noexpect here because we don't expect a `?` directly after a `return`
-                // which could be suggested otherwise
-                self.eat_noexpect(&token::Question)
-            } else {
-                self.eat(&token::Question)
-            };
+            let has_question =
+                if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
+                    // we are using noexpect here because we don't expect a `?` directly after a `return`
+                    // which could be suggested otherwise
+                    self.eat_noexpect(&token::Question)
+                } else {
+                    self.eat(&token::Question)
+                };
             if has_question {
                 // `expr?`
                 e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
                 continue;
             }
-            let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
+            let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
                 // we are using noexpect here because we don't expect a `.` directly after a `return`
                 // which could be suggested otherwise
                 self.eat_noexpect(&token::Dot)
@@ -992,7 +994,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
+    pub fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
         match self.token.uninterpolate().kind {
             token::Ident(..) => self.parse_dot_suffix(base, lo),
             token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
@@ -1011,7 +1013,7 @@ impl<'a> Parser<'a> {
     fn error_unexpected_after_dot(&self) {
         let actual = pprust::token_to_string(&self.token);
         let span = self.token.span;
-        let sm = self.sess.source_map();
+        let sm = self.psess.source_map();
         let (span, actual) = match (&self.token.kind, self.subparser_name) {
             (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => {
                 (span.shrink_to_hi(), actual.into())
@@ -1128,19 +1130,19 @@ impl<'a> Parser<'a> {
             // 1.
             DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
                 assert!(suffix.is_none());
-                self.token = Token::new(token::Ident(sym, false), ident_span);
+                self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
                 let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
                 self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token))
             }
             // 1.2 | 1.2e3
             DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
-                self.token = Token::new(token::Ident(symbol1, false), ident1_span);
+                self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span);
                 // This needs to be `Spacing::Alone` to prevent regressions.
                 // See issue #76399 and PR #76285 for more details
                 let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
                 let base1 =
                     self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
-                let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
+                let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span);
                 self.bump_with((next_token2, self.token_spacing)); // `.`
                 self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
             }
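
Note: the `DestructuredFloat` handling above (now building idents with `IdentIsRaw::No`) exists because nested tuple indexing arrives from the lexer as a single float-shaped token. A small illustration of the surface syntax involved:

    fn main() {
        let t = (0, (10, 20, 30));
        // `t.1.2` is lexed with a `1.2` float-like token; the parser splits it
        // back into the two field accesses `.1` and `.2`.
        let x = t.1.2;
        assert_eq!(x, 30);
    }
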
@@ -1313,7 +1315,7 @@ impl<'a> Parser<'a> {
                         let fields: Vec<_> =
                             fields.into_iter().filter(|field| !field.is_shorthand).collect();
 
-                        if !fields.is_empty() &&
+                        let guar = if !fields.is_empty() &&
                             // `token.kind` should not be compared here.
                             // This is because the `snapshot.token.kind` is treated as the same as
                             // that of the open delim in `TokenTreesReader::parse_token_tree`, even
@@ -1336,11 +1338,11 @@ impl<'a> Parser<'a> {
                                             .collect(),
                                     },
                                 })
-                                .emit();
+                                .emit()
                         } else {
-                            err.emit();
-                        }
-                        Ok(self.mk_expr_err(span))
+                            err.emit()
+                        };
+                        Ok(self.mk_expr_err(span, guar))
                     }
                     Ok(_) => Err(err),
                     Err(err2) => {
@@ -1432,8 +1434,8 @@ impl<'a> Parser<'a> {
                 this.parse_expr_closure().map_err(|mut err| {
                     // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
                     // then suggest parens around the lhs.
-                    if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
-                        err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
+                    if let Some(sp) = this.psess.ambiguous_block_expr_parse.borrow().get(&lo) {
+                        err.subdiagnostic(this.dcx(), ExprParenthesesNeeded::surrounding(*sp));
                     }
                     err
                 })
@@ -1557,7 +1559,7 @@ impl<'a> Parser<'a> {
                 return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err));
             }
         };
-        let kind = if es.len() == 1 && !trailing_comma {
+        let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) {
             // `(e)` is parenthesized `e`.
             ExprKind::Paren(es.into_iter().next().unwrap())
         } else {
@@ -1632,7 +1634,7 @@ impl<'a> Parser<'a> {
             && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path)
         {
             if qself.is_some() {
-                self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
+                self.psess.gated_spans.gate(sym::more_qualified_paths, path.span);
             }
             return expr;
         } else {
@@ -1682,13 +1684,13 @@ impl<'a> Parser<'a> {
             && (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
         {
             // We're probably inside of a `Path<'a>` that needs a turbofish
-            self.dcx().emit_err(errors::UnexpectedTokenAfterLabel {
+            let guar = self.dcx().emit_err(errors::UnexpectedTokenAfterLabel {
                 span: self.token.span,
                 remove_label: None,
                 enclose_in_block: None,
             });
             consume_colon = false;
-            Ok(self.mk_expr_err(lo))
+            Ok(self.mk_expr_err(lo, guar))
         } else {
             let mut err = errors::UnexpectedTokenAfterLabel {
                 span: self.token.span,
@@ -1757,27 +1759,28 @@ impl<'a> Parser<'a> {
         &self,
         ident: Ident,
         mk_lit_char: impl FnOnce(Symbol, Span) -> L,
-        err: impl FnOnce(&Self) -> DiagnosticBuilder<'a>,
+        err: impl FnOnce(&Self) -> Diag<'a>,
     ) -> L {
         assert!(could_be_unclosed_char_literal(ident));
-        if let Some(diag) = self.dcx().steal_diagnostic(ident.span, StashKey::LifetimeIsChar) {
-            diag.with_span_suggestion_verbose(
-                ident.span.shrink_to_hi(),
-                "add `'` to close the char literal",
-                "'",
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
-        } else {
-            err(self)
-                .with_span_suggestion_verbose(
+        self.dcx()
+            .try_steal_modify_and_emit_err(ident.span, StashKey::LifetimeIsChar, |err| {
+                err.span_suggestion_verbose(
                     ident.span.shrink_to_hi(),
                     "add `'` to close the char literal",
                     "'",
                     Applicability::MaybeIncorrect,
-                )
-                .emit();
-        }
+                );
+            })
+            .unwrap_or_else(|| {
+                err(self)
+                    .with_span_suggestion_verbose(
+                        ident.span.shrink_to_hi(),
+                        "add `'` to close the char literal",
+                        "'",
+                        Applicability::MaybeIncorrect,
+                    )
+                    .emit()
+            });
         let name = ident.without_first_quote().name;
         mk_lit_char(name, ident.span)
     }
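
Note: the `LifetimeIsChar` stash handling above covers `'a` written where a char literal was intended; the stashed or fresh error gets a suggestion to add the closing quote. A sketch of the shape it targets:

    fn is_a(c: char) -> bool {
        // c == 'a      // rejected: lexes as a lifetime; the suggestion adds the closing `'`
        c == 'a'
    }

    fn main() {
        assert!(is_a('a'));
    }
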
@@ -1818,7 +1821,7 @@ impl<'a> Parser<'a> {
         let kind = ExprKind::Yeet(self.parse_expr_opt()?);
 
         let span = lo.to(self.prev_token.span);
-        self.sess.gated_spans.gate(sym::yeet_expr, span);
+        self.psess.gated_spans.gate(sym::yeet_expr, span);
         let expr = self.mk_expr(span, kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
@@ -1828,7 +1831,7 @@ impl<'a> Parser<'a> {
         let lo = self.prev_token.span;
         let kind = ExprKind::Become(self.parse_expr()?);
         let span = lo.to(self.prev_token.span);
-        self.sess.gated_spans.gate(sym::explicit_tail_calls, span);
+        self.psess.gated_spans.gate(sym::explicit_tail_calls, span);
         let expr = self.mk_expr(span, kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
@@ -1872,12 +1875,12 @@ impl<'a> Parser<'a> {
                             | ExprKind::Block(_, None)
                     )
                 {
-                    self.sess.buffer_lint_with_diagnostic(
+                    self.psess.buffer_lint_with_diagnostic(
                         BREAK_WITH_LABEL_AND_LOOP,
                         lo.to(expr.span),
                         ast::CRATE_NODE_ID,
                         "this labeled break expression is easy to confuse with an unlabeled break with a labeled value expression",
-                        BuiltinLintDiagnostics::BreakWithLabelAndLoop(expr.span),
+                        BuiltinLintDiag::BreakWithLabelAndLoop(expr.span),
                     );
                 }
 
@@ -1923,7 +1926,7 @@ impl<'a> Parser<'a> {
         let lo = self.prev_token.span;
         let kind = ExprKind::Yield(self.parse_expr_opt()?);
         let span = lo.to(self.prev_token.span);
-        self.sess.gated_spans.gate(sym::yield_expr, span);
+        self.psess.gated_spans.gate(sym::yield_expr, span);
         let expr = self.mk_expr(span, kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
@@ -1948,11 +1951,11 @@ impl<'a> Parser<'a> {
         self.bump(); // `builtin`
         self.bump(); // `#`
 
-        let Some((ident, false)) = self.token.ident() else {
+        let Some((ident, IdentIsRaw::No)) = self.token.ident() else {
             let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span });
             return Err(err);
         };
-        self.sess.gated_spans.gate(sym::builtin_syntax, ident.span);
+        self.psess.gated_spans.gate(sym::builtin_syntax, ident.span);
         self.bump();
 
         self.expect(&TokenKind::OpenDelim(Delimiter::Parenthesis))?;
@@ -2037,7 +2040,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, L> {
         if let token::Interpolated(nt) = &self.token.kind
             && let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
-            && matches!(e.kind, ExprKind::Err)
+            && matches!(e.kind, ExprKind::Err(_))
         {
             let mut err = self
                 .dcx()
@@ -2140,12 +2143,12 @@ impl<'a> Parser<'a> {
                     Err(err) => {
                         let span = token.uninterpolated_span();
                         self.bump();
-                        report_lit_error(self.sess, err, lit, span);
+                        let guar = report_lit_error(self.psess, err, lit, span);
                         // Pack possible quotes and prefixes from the original literal into
                         // the error literal's symbol so they can be pretty-printed faithfully.
                         let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
                         let symbol = Symbol::intern(&suffixless_lit.to_string());
-                        let lit = token::Lit::new(token::Err, symbol, lit.suffix);
+                        let lit = token::Lit::new(token::Err(guar), symbol, lit.suffix);
                         Some(
                             MetaItemLit::from_token_lit(lit, span)
                                 .unwrap_or_else(|_| unreachable!()),
@@ -2205,7 +2208,7 @@ impl<'a> Parser<'a> {
         let mut snapshot = self.create_snapshot_for_diagnostic();
         match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) {
             Ok(arr) => {
-                self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces {
+                let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces {
                     span: arr.span,
                     sub: errors::ArrayBracketsInsteadOfSpacesSugg {
                         left: lo,
@@ -2214,7 +2217,7 @@ impl<'a> Parser<'a> {
                 });
 
                 self.restore_snapshot(snapshot);
-                Some(self.mk_expr_err(arr.span))
+                Some(self.mk_expr_err(arr.span, guar))
             }
             Err(e) => {
                 e.cancel();
@@ -2233,7 +2236,7 @@ impl<'a> Parser<'a> {
         }
 
         if self.token.kind == token::Comma {
-            if !self.sess.source_map().is_multiline(prev_span.until(self.token.span)) {
+            if !self.psess.source_map().is_multiline(prev_span.until(self.token.span)) {
                 return Ok(());
             }
             let mut snapshot = self.create_snapshot_for_diagnostic();
@@ -2309,7 +2312,7 @@ impl<'a> Parser<'a> {
             let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
             let span = lo.to(self.prev_token.span);
 
-            self.sess.gated_spans.gate(sym::closure_lifetime_binder, span);
+            self.psess.gated_spans.gate(sym::closure_lifetime_binder, span);
 
             ClosureBinder::For { span, generic_params: lifetime_defs }
         } else {
@@ -2351,12 +2354,12 @@ impl<'a> Parser<'a> {
         match coroutine_kind {
             Some(CoroutineKind::Async { span, .. }) => {
                 // Feature-gate `async ||` closures.
-                self.sess.gated_spans.gate(sym::async_closure, span);
+                self.psess.gated_spans.gate(sym::async_closure, span);
             }
             Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => {
                 // Feature-gate `gen ||` and `async gen ||` closures.
                 // FIXME(gen_blocks): This perhaps should be a different gate.
-                self.sess.gated_spans.gate(sym::gen_blocks, span);
+                self.psess.gated_spans.gate(sym::gen_blocks, span);
             }
             None => {}
         }
@@ -2368,7 +2371,10 @@ impl<'a> Parser<'a> {
             // It is likely that the closure body is a block whose braces
             // have been removed. We will recover and eat the next
             // statements later in the parsing process.
-            body = self.mk_expr_err(body.span);
+            body = self.mk_expr_err(
+                body.span,
+                self.dcx().span_delayed_bug(body.span, "recovered a closure body as a block"),
+            );
         }
 
         let body_span = body.span;
@@ -2483,7 +2489,7 @@ impl<'a> Parser<'a> {
                 ExprKind::Binary(Spanned { span: binop_span, .. }, _, right)
                     if let ExprKind::Block(_, None) = right.kind =>
                 {
-                    this.dcx().emit_err(errors::IfExpressionMissingThenBlock {
+                    let guar = this.dcx().emit_err(errors::IfExpressionMissingThenBlock {
                         if_span: lo,
                         missing_then_block_sub:
                             errors::IfExpressionMissingThenBlockSub::UnfinishedCondition(
@@ -2491,14 +2497,14 @@ impl<'a> Parser<'a> {
                             ),
                         let_else_sub: None,
                     });
-                    std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi()))
+                    std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi(), guar))
                 }
                 ExprKind::Block(_, None) => {
-                    this.dcx().emit_err(errors::IfExpressionMissingCondition {
+                    let guar = this.dcx().emit_err(errors::IfExpressionMissingCondition {
                         if_span: lo.with_neighbor(cond.span).shrink_to_hi(),
-                        block_span: self.sess.source_map().start_point(cond_span),
+                        block_span: self.psess.source_map().start_point(cond_span),
                     });
-                    std::mem::replace(&mut cond, this.mk_expr_err(cond_span.shrink_to_hi()))
+                    std::mem::replace(&mut cond, this.mk_expr_err(cond_span.shrink_to_hi(), guar))
                 }
                 _ => {
                     return None;
@@ -2518,14 +2524,14 @@ impl<'a> Parser<'a> {
                 let let_else_sub = matches!(cond.kind, ExprKind::Let(..))
                     .then(|| errors::IfExpressionLetSomeSub { if_span: lo.until(cond_span) });
 
-                self.dcx().emit_err(errors::IfExpressionMissingThenBlock {
+                let guar = self.dcx().emit_err(errors::IfExpressionMissingThenBlock {
                     if_span: lo,
                     missing_then_block_sub: errors::IfExpressionMissingThenBlockSub::AddThenBlock(
                         cond_span.shrink_to_hi(),
                     ),
                     let_else_sub,
                 });
-                self.mk_block_err(cond_span.shrink_to_hi())
+                self.mk_block_err(cond_span.shrink_to_hi(), guar)
             }
         } else {
             let attrs = self.parse_outer_attributes()?; // For recovery.
@@ -2588,7 +2594,7 @@ impl<'a> Parser<'a> {
 
         if let ExprKind::Let(_, _, _, None) = cond.kind {
             // Remove the last feature gating of a `let` expression since it's stable.
-            self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
+            self.psess.gated_spans.ungate_last(sym::let_chains, cond.span);
         }
 
         Ok(cond)
@@ -2683,23 +2689,20 @@ impl<'a> Parser<'a> {
         branch_span: Span,
         attrs: AttrWrapper,
     ) {
-        if attrs.is_empty() {
-            return;
+        if !attrs.is_empty()
+            && let [x0 @ xn] | [x0, .., xn] = &*attrs.take_for_recovery(self.psess)
+        {
+            let attributes = x0.span.to(xn.span);
+            let last = xn.span;
+            let ctx = if is_ctx_else { "else" } else { "if" };
+            self.dcx().emit_err(errors::OuterAttributeNotAllowedOnIfElse {
+                last,
+                branch_span,
+                ctx_span,
+                ctx: ctx.to_string(),
+                attributes,
+            });
         }
-
-        let attrs: &[ast::Attribute] = &attrs.take_for_recovery(self.sess);
-        let (attributes, last) = match attrs {
-            [] => return,
-            [x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span),
-        };
-        let ctx = if is_ctx_else { "else" } else { "if" };
-        self.dcx().emit_err(errors::OuterAttributeNotAllowedOnIfElse {
-            last,
-            branch_span,
-            ctx_span,
-            ctx: ctx.to_string(),
-            attributes,
-        });
     }
 
     fn error_on_extra_if(&mut self, cond: &P<Expr>) -> PResult<'a, ()> {
@@ -2784,7 +2787,7 @@ impl<'a> Parser<'a> {
             self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await);
 
         if is_await {
-            self.sess.gated_spans.gate(sym::async_for_loop, self.prev_token.span);
+            self.psess.gated_spans.gate(sym::async_for_loop, self.prev_token.span);
         }
 
         let kind = if is_await { ForLoopKind::ForAwait } else { ForLoopKind::For };
@@ -2795,9 +2798,10 @@ impl<'a> Parser<'a> {
             && !matches!(self.token.kind, token::OpenDelim(Delimiter::Brace))
             && self.may_recover()
         {
-            self.dcx()
+            let guar = self
+                .dcx()
                 .emit_err(errors::MissingExpressionInForLoop { span: expr.span.shrink_to_lo() });
-            let err_expr = self.mk_expr(expr.span, ExprKind::Err);
+            let err_expr = self.mk_expr(expr.span, ExprKind::Err(guar));
             let block = self.mk_block(thin_vec![], BlockCheckMode::Default, self.prev_token.span);
             return Ok(self.mk_expr(
                 lo.to(self.prev_token.span),
@@ -2922,7 +2926,7 @@ impl<'a> Parser<'a> {
                         attrs: Default::default(),
                         pat: self.mk_pat(span, ast::PatKind::Err(guar)),
                         guard: None,
-                        body: Some(self.mk_expr_err(span)),
+                        body: Some(self.mk_expr_err(span, guar)),
                         span,
                         id: DUMMY_NODE_ID,
                         is_placeholder: false,
@@ -2957,7 +2961,7 @@ impl<'a> Parser<'a> {
         let err = |this: &Parser<'_>, stmts: Vec<ast::Stmt>| {
             let span = stmts[0].span.to(stmts[stmts.len() - 1].span);
 
-            this.dcx().emit_err(errors::MatchArmBodyWithoutBraces {
+            let guar = this.dcx().emit_err(errors::MatchArmBodyWithoutBraces {
                 statements: span,
                 arrow: arrow_span,
                 num_statements: stmts.len(),
@@ -2970,7 +2974,7 @@ impl<'a> Parser<'a> {
                     errors::MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
                 },
             });
-            this.mk_expr_err(span)
+            this.mk_expr_err(span, guar)
         };
         // We might have either a `,` -> `;` typo, or a block without braces. We need
         // a more subtle parsing strategy.
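
Note: the hunks around here belong to the recovery for match-arm bodies written as several statements without a surrounding block. A sketch of the shape it targets, with the rejected form commented out:

    fn classify(n: i32) -> &'static str {
        match n {
            // 0 => println!("zero"); "zero",   // rejected: the arm body needs braces
            0 => {
                println!("zero");
                "zero"
            }
            _ => "other",
        }
    }

    fn main() {
        assert_eq!(classify(0), "zero");
    }
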
@@ -3044,7 +3048,7 @@ impl<'a> Parser<'a> {
                     |x| {
                         // Don't gate twice
                         if !pat.contains_never_pattern() {
-                            this.sess.gated_spans.gate(sym::never_patterns, pat.span);
+                            this.psess.gated_spans.gate(sym::never_patterns, pat.span);
                         }
                         x
                     },
@@ -3089,17 +3093,17 @@ impl<'a> Parser<'a> {
                 if !require_comma {
                     arm_body = Some(expr);
                     this.eat(&token::Comma);
-                    Ok(false)
+                    Ok(Recovered::No)
                 } else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
                     arm_body = Some(body);
-                    Ok(true)
+                    Ok(Recovered::Yes)
                 } else {
                     let expr_span = expr.span;
                     arm_body = Some(expr);
                     this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
                         .map_err(|mut err| {
                             if this.token == token::FatArrow {
-                                let sm = this.sess.source_map();
+                                let sm = this.psess.source_map();
                                 if let Ok(expr_lines) = sm.span_to_lines(expr_span)
                                     && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
                                     && arm_start_lines.lines[0].end_col
@@ -3173,7 +3177,7 @@ impl<'a> Parser<'a> {
                         this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
                             span: arm_span.shrink_to_hi(),
                         });
-                        return Ok(true);
+                        return Ok(Recovered::Yes);
                     }
                     Err(err)
                 });
@@ -3223,10 +3227,10 @@ impl<'a> Parser<'a> {
         if has_let_expr {
             if does_not_have_bin_op {
                 // Remove the last feature gating of a `let` expression since it's stable.
-                self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
+                self.psess.gated_spans.ungate_last(sym::let_chains, cond.span);
             }
             let span = if_span.to(cond.span);
-            self.sess.gated_spans.gate(sym::if_let_guard, span);
+            self.psess.gated_spans.gate(sym::if_let_guard, span);
         }
         Ok(Some(cond))
     }
@@ -3317,7 +3321,7 @@ impl<'a> Parser<'a> {
             Err(self.dcx().create_err(errors::CatchAfterTry { span: self.prev_token.span }))
         } else {
             let span = span_lo.to(body.span);
-            self.sess.gated_spans.gate(sym::try_blocks, span);
+            self.psess.gated_spans.gate(sym::try_blocks, span);
             Ok(self.mk_expr_with_attrs(span, ExprKind::TryBlock(body), attrs))
         }
     }
@@ -3355,7 +3359,7 @@ impl<'a> Parser<'a> {
                 // `async` blocks are stable
             }
             GenBlockKind::Gen | GenBlockKind::AsyncGen => {
-                self.sess.gated_spans.gate(sym::gen_blocks, lo.to(self.prev_token.span));
+                self.psess.gated_spans.gate(sym::gen_blocks, lo.to(self.prev_token.span));
             }
         }
         let capture_clause = self.parse_capture_clause()?;
@@ -3431,14 +3435,20 @@ impl<'a> Parser<'a> {
         pth: ast::Path,
         recover: bool,
         close_delim: Delimiter,
-    ) -> PResult<'a, (ThinVec<ExprField>, ast::StructRest, bool)> {
+    ) -> PResult<
+        'a,
+        (
+            ThinVec<ExprField>,
+            ast::StructRest,
+            Option<ErrorGuaranteed>, /* async blocks are forbidden in Rust 2015 */
+        ),
+    > {
         let mut fields = ThinVec::new();
         let mut base = ast::StructRest::None;
-        let mut recover_async = false;
+        let mut recovered_async = None;
         let in_if_guard = self.restrictions.contains(Restrictions::IN_IF_GUARD);
 
-        let mut async_block_err = |e: &mut Diagnostic, span: Span| {
-            recover_async = true;
+        let async_block_err = |e: &mut Diag<'_>, span: Span| {
             errors::AsyncBlockIn2015 { span }.add_to_diagnostic(e);
             errors::HelpUseLatestEdition::new().add_to_diagnostic(e);
         };
@@ -3463,9 +3473,34 @@ impl<'a> Parser<'a> {
                 break;
             }
 
-            let recovery_field = self.find_struct_error_after_field_looking_code();
+            // Peek the field's ident before parsing its expr in order to emit better diagnostics.
+            let peek = self
+                .token
+                .ident()
+                .filter(|(ident, is_raw)| {
+                    (!ident.is_reserved() || matches!(is_raw, IdentIsRaw::Yes))
+                        && self.look_ahead(1, |tok| *tok == token::Colon)
+                })
+                .map(|(ident, _)| ident);
+
+            // We still want a field even if its expr didn't parse.
+            let field_ident = |this: &Self, guar: ErrorGuaranteed| {
+                peek.map(|ident| {
+                    let span = ident.span;
+                    ExprField {
+                        ident,
+                        span,
+                        expr: this.mk_expr_err(span, guar),
+                        is_shorthand: false,
+                        attrs: AttrVec::new(),
+                        id: DUMMY_NODE_ID,
+                        is_placeholder: false,
+                    }
+                })
+            };
+
             let parsed_field = match self.parse_expr_field() {
-                Ok(f) => Some(f),
+                Ok(f) => Ok(f),
                 Err(mut e) => {
                     if pth == kw::Async {
                         async_block_err(&mut e, pth.span);
@@ -3497,7 +3532,10 @@ impl<'a> Parser<'a> {
                         return Err(e);
                     }
 
-                    e.emit();
+                    let guar = e.emit();
+                    if pth == kw::Async {
+                        recovered_async = Some(guar);
+                    }
 
                     // If the next token is a comma, then try to parse
                     // what comes next as additional fields, rather than
@@ -3509,18 +3547,20 @@ impl<'a> Parser<'a> {
                         }
                     }
 
-                    None
+                    Err(guar)
                 }
             };
 
-            let is_shorthand = parsed_field.as_ref().is_some_and(|f| f.is_shorthand);
+            let is_shorthand = parsed_field.as_ref().is_ok_and(|f| f.is_shorthand);
             // A shorthand field can be turned into a full field with `:`.
             // We should point this out.
             self.check_or_expected(!is_shorthand, TokenType::Token(token::Colon));
 
             match self.expect_one_of(&[token::Comma], &[token::CloseDelim(close_delim)]) {
                 Ok(_) => {
-                    if let Some(f) = parsed_field.or(recovery_field) {
+                    if let Some(f) =
+                        parsed_field.or_else(|guar| field_ident(self, guar).ok_or(guar)).ok()
+                    {
                         // Only include the field if there's no parse error for the field name.
                         fields.push(f);
                     }
@@ -3530,8 +3570,7 @@ impl<'a> Parser<'a> {
                         async_block_err(&mut e, pth.span);
                     } else {
                         e.span_label(pth.span, "while parsing this struct");
-                        if let Some(f) = recovery_field {
-                            fields.push(f);
+                        if peek.is_some() {
                             e.span_suggestion(
                                 self.prev_token.span.shrink_to_hi(),
                                 "try adding a comma",
@@ -3543,13 +3582,18 @@ impl<'a> Parser<'a> {
                     if !recover {
                         return Err(e);
                     }
-                    e.emit();
+                    let guar = e.emit();
+                    if pth == kw::Async {
+                        recovered_async = Some(guar);
+                    } else if let Some(f) = field_ident(self, guar) {
+                        fields.push(f);
+                    }
                     self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
                     self.eat(&token::Comma);
                 }
             }
         }
-        Ok((fields, base, recover_async))
+        Ok((fields, base, recovered_async))
     }
 
     /// Precondition: already parsed the '{'.
@@ -3560,39 +3604,18 @@ impl<'a> Parser<'a> {
         recover: bool,
     ) -> PResult<'a, P<Expr>> {
         let lo = pth.span;
-        let (fields, base, recover_async) =
+        let (fields, base, recovered_async) =
             self.parse_struct_fields(pth.clone(), recover, Delimiter::Brace)?;
         let span = lo.to(self.token.span);
         self.expect(&token::CloseDelim(Delimiter::Brace))?;
-        let expr = if recover_async {
-            ExprKind::Err
+        let expr = if let Some(guar) = recovered_async {
+            ExprKind::Err(guar)
         } else {
             ExprKind::Struct(P(ast::StructExpr { qself, path: pth, fields, rest: base }))
         };
         Ok(self.mk_expr(span, expr))
     }
 
-    /// Use in case of error after field-looking code: `S { foo: () with a }`.
-    fn find_struct_error_after_field_looking_code(&self) -> Option<ExprField> {
-        match self.token.ident() {
-            Some((ident, is_raw))
-                if (is_raw || !ident.is_reserved())
-                    && self.look_ahead(1, |t| *t == token::Colon) =>
-            {
-                Some(ast::ExprField {
-                    ident,
-                    span: self.token.span,
-                    expr: self.mk_expr_err(self.token.span),
-                    is_shorthand: false,
-                    attrs: AttrVec::new(),
-                    id: DUMMY_NODE_ID,
-                    is_placeholder: false,
-                })
-            }
-            _ => None,
-        }
-    }
-
     fn recover_struct_comma_after_dotdot(&mut self, span: Span) {
         if self.token != token::Comma {
             return;
@@ -3716,8 +3739,8 @@ impl<'a> Parser<'a> {
         limits: RangeLimits,
     ) -> ExprKind {
         if end.is_none() && limits == RangeLimits::Closed {
-            self.inclusive_range_with_incorrect_end();
-            ExprKind::Err
+            let guar = self.inclusive_range_with_incorrect_end();
+            ExprKind::Err(guar)
         } else {
             ExprKind::Range(start, end, limits)
         }
@@ -3754,8 +3777,8 @@ impl<'a> Parser<'a> {
         self.mk_expr_with_attrs(span, kind, AttrVec::new())
     }
 
-    pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> {
-        self.mk_expr(span, ExprKind::Err)
+    pub(super) fn mk_expr_err(&self, span: Span, guar: ErrorGuaranteed) -> P<Expr> {
+        self.mk_expr(span, ExprKind::Err(guar))
     }
 
     /// Create expression span ensuring the span of the parent node
@@ -3853,7 +3876,7 @@ impl MutVisitor for CondChecker<'_> {
                             comparison: self.comparison,
                         }));
                 } else {
-                    self.parser.sess.gated_spans.gate(sym::let_chains, span);
+                    self.parser.psess.gated_spans.gate(sym::let_chains, span);
                 }
             }
             ExprKind::Binary(Spanned { node: BinOpKind::And, .. }, _, _) => {
@@ -3947,7 +3970,8 @@ impl MutVisitor for CondChecker<'_> {
             | ExprKind::Become(_)
             | ExprKind::IncludedBytes(_)
             | ExprKind::FormatArgs(_)
-            | ExprKind::Err => {
+            | ExprKind::Err(_)
+            | ExprKind::Dummy => {
                 // These would forbid any let expressions they contain already.
             }
         }
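
The change running through this whole expr.rs diff is that recovery paths now keep the `ErrorGuaranteed` value returned by `emit_err` and store it in the placeholder node they build (`ExprKind::Err(guar)`, `mk_expr_err(span, guar)`), while the new `ExprKind::Dummy` covers placeholders that come with no emitted diagnostic. A minimal standalone sketch of that pattern follows; the types and `emit_err` below are illustrative stand-ins, not the compiler's real definitions.

    // Sketch only: an error node can only be built from proof that a
    // diagnostic was emitted.
    #[derive(Clone, Copy, Debug)]
    struct ErrorGuaranteed; // in rustc, only obtainable by actually emitting an error

    fn emit_err(msg: &str) -> ErrorGuaranteed {
        eprintln!("error: {msg}");
        ErrorGuaranteed
    }

    #[derive(Debug)]
    enum Expr {
        Lit(i64),
        // Recovered-from-error node: carries the proof that an error was reported.
        Err(ErrorGuaranteed),
        // Placeholder that carries no diagnostic.
        Dummy,
    }

    fn mk_expr_err(guar: ErrorGuaranteed) -> Expr {
        Expr::Err(guar)
    }

    fn main() {
        let guar = emit_err("missing expression in `for` loop");
        let exprs = [Expr::Lit(1), mk_expr_err(guar), Expr::Dummy];
        println!("{exprs:?}");
    }
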
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index e059e707491..263b2a90643 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
                         type_err.cancel();
 
                         let body_sp = pred_lo.to(snapshot.prev_token.span);
-                        let map = self.sess.source_map();
+                        let map = self.psess.source_map();
 
                         self.dcx().emit_err(WhereClauseBeforeTupleStructBody {
                             span: where_sp,
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 8050b34956c..79492fe62ed 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1,8 +1,12 @@
 use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
-use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
+use super::{
+    AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, Trailing,
+    TrailingToken,
+};
 use crate::errors::{self, MacroExpandsToAdtField};
 use crate::fluent_generated as fluent;
+use ast::token::IdentIsRaw;
 use rustc_ast::ast::*;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, TokenKind};
@@ -559,7 +563,16 @@ impl<'a> Parser<'a> {
 
         let constness = self.parse_constness(Case::Sensitive);
         if let Const::Yes(span) = constness {
-            self.sess.gated_spans.gate(sym::const_trait_impl, span);
+            self.psess.gated_spans.gate(sym::const_trait_impl, span);
+        }
+
+        // Parse stray `impl async Trait`
+        if (self.token.uninterpolated_span().at_least_rust_2018()
+            && self.token.is_keyword(kw::Async))
+            || self.is_kw_followed_by_ident(kw::Async)
+        {
+            self.bump();
+            self.dcx().emit_err(errors::AsyncImpl { span: self.prev_token.span });
         }
 
         let polarity = self.parse_polarity();
@@ -591,7 +604,11 @@ impl<'a> Parser<'a> {
         let ty_second = if self.token == token::DotDot {
             // We need to report this error after `cfg` expansion for compatibility reasons
             self.bump(); // `..`, do not add it to expected tokens
-            Some(self.mk_ty(self.prev_token.span, TyKind::Err))
+
+            // AST validation later detects this `TyKind::Dummy` and emits an
+            // error. (#121072 will hopefully remove all this special handling
+            // of the obsolete `impl Trait for ..` and then this can go away.)
+            Some(self.mk_ty(self.prev_token.span, TyKind::Dummy))
         } else if has_for || self.token.can_begin_type() {
             Some(self.parse_ty()?)
         } else {
@@ -682,7 +699,7 @@ impl<'a> Parser<'a> {
             None
         };
         let span = span.to(self.prev_token.span);
-        self.sess.gated_spans.gate(sym::fn_delegation, span);
+        self.psess.gated_spans.gate(sym::fn_delegation, span);
 
         let ident = path.segments.last().map(|seg| seg.ident).unwrap_or(Ident::empty());
         Ok((
@@ -842,7 +859,7 @@ impl<'a> Parser<'a> {
         let unsafety = self.parse_unsafety(Case::Sensitive);
         // Parse optional `auto` prefix.
         let is_auto = if self.eat_keyword(kw::Auto) {
-            self.sess.gated_spans.gate(sym::auto_traits, self.prev_token.span);
+            self.psess.gated_spans.gate(sym::auto_traits, self.prev_token.span);
             IsAuto::Yes
         } else {
             IsAuto::No
@@ -877,7 +894,7 @@ impl<'a> Parser<'a> {
                 self.dcx().emit_err(errors::TraitAliasCannotBeUnsafe { span: whole_span });
             }
 
-            self.sess.gated_spans.gate(sym::trait_alias, whole_span);
+            self.psess.gated_spans.gate(sym::trait_alias, whole_span);
 
             Ok((ident, ItemKind::TraitAlias(generics, bounds)))
         } else {
@@ -954,11 +971,17 @@ impl<'a> Parser<'a> {
 
         let after_where_clause = self.parse_where_clause()?;
 
-        let where_clauses = (
-            TyAliasWhereClause(before_where_clause.has_where_token, before_where_clause.span),
-            TyAliasWhereClause(after_where_clause.has_where_token, after_where_clause.span),
-        );
-        let where_predicates_split = before_where_clause.predicates.len();
+        let where_clauses = TyAliasWhereClauses {
+            before: TyAliasWhereClause {
+                has_where_token: before_where_clause.has_where_token,
+                span: before_where_clause.span,
+            },
+            after: TyAliasWhereClause {
+                has_where_token: after_where_clause.has_where_token,
+                span: after_where_clause.span,
+            },
+            split: before_where_clause.predicates.len(),
+        };
         let mut predicates = before_where_clause.predicates;
         predicates.extend(after_where_clause.predicates);
         let where_clause = WhereClause {
@@ -977,7 +1000,6 @@ impl<'a> Parser<'a> {
                 defaultness,
                 generics,
                 where_clauses,
-                where_predicates_split,
                 bounds,
                 ty,
             })),
@@ -1063,7 +1085,7 @@ impl<'a> Parser<'a> {
 
     fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> {
         match self.token.ident() {
-            Some((ident @ Ident { name: kw::Underscore, .. }, false)) => {
+            Some((ident @ Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) => {
                 self.bump();
                 Ok(ident)
             }
@@ -1187,7 +1209,7 @@ impl<'a> Parser<'a> {
 
     fn error_bad_item_kind<T>(&self, span: Span, kind: &ItemKind, ctx: &'static str) -> Option<T> {
         // FIXME(#100717): needs variant for each `ItemKind` (instead of using `ItemKind::descr()`)
-        let span = self.sess.source_map().guess_head_span(span);
+        let span = self.psess.source_map().guess_head_span(span);
         let descr = kind.descr();
         self.dcx().emit_err(errors::BadItemKind { span, descr, ctx });
         None
@@ -1310,7 +1332,7 @@ impl<'a> Parser<'a> {
         // Check the span for emptiness instead of the list of parameters in order to correctly
         // recognize and subsequently flag empty parameter lists (`<>`) as unstable.
         if !generics.span.is_empty() {
-            self.sess.gated_spans.gate(sym::generic_const_items, generics.span);
+            self.psess.gated_spans.gate(sym::generic_const_items, generics.span);
         }
 
         // Parse the type of a constant item. That is, the `":" $ty` fragment.
@@ -1344,7 +1366,7 @@ impl<'a> Parser<'a> {
                 name: ident.span,
                 body: expr.span,
                 sugg: if !after_where_clause.has_where_token {
-                    self.sess.source_map().span_to_snippet(expr.span).ok().map(|body| {
+                    self.psess.source_map().span_to_snippet(expr.span).ok().map(|body| {
                         errors::WhereClauseBeforeConstBodySugg {
                             left: before_where_clause.span.shrink_to_lo(),
                             snippet: body,
@@ -1379,7 +1401,7 @@ impl<'a> Parser<'a> {
         };
 
         if where_clause.has_where_token {
-            self.sess.gated_spans.gate(sym::generic_const_items, where_clause.span);
+            self.psess.gated_spans.gate(sym::generic_const_items, where_clause.span);
         }
 
         generics.where_clause = where_clause;
@@ -1440,7 +1462,7 @@ impl<'a> Parser<'a> {
         let (variants, _) = if self.token == TokenKind::Semi {
             self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
             self.bump();
-            (thin_vec![], false)
+            (thin_vec![], Trailing::No)
         } else {
             self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
                 .map_err(|mut err| {
@@ -1517,10 +1539,10 @@ impl<'a> Parser<'a> {
                                 err.span_label(span, "while parsing this enum");
                                 err.help(help);
                                 err.emit();
-                                (thin_vec![], true)
+                                (thin_vec![], Recovered::Yes)
                             }
                         };
-                    VariantData::Struct { fields, recovered }
+                    VariantData::Struct { fields, recovered: recovered.into() }
                 } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
                     let body = match this.parse_tuple_struct_body() {
                         Ok(body) => body,
@@ -1605,7 +1627,7 @@ impl<'a> Parser<'a> {
                     class_name.span,
                     generics.where_clause.has_where_token,
                 )?;
-                VariantData::Struct { fields, recovered }
+                VariantData::Struct { fields, recovered: recovered.into() }
             }
         // No `where` so: `struct Foo<T>;`
         } else if self.eat(&token::Semi) {
@@ -1617,7 +1639,7 @@ impl<'a> Parser<'a> {
                 class_name.span,
                 generics.where_clause.has_where_token,
             )?;
-            VariantData::Struct { fields, recovered }
+            VariantData::Struct { fields, recovered: recovered.into() }
         // Tuple-style struct definition with optional where-clause.
         } else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
             let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
@@ -1646,14 +1668,14 @@ impl<'a> Parser<'a> {
                 class_name.span,
                 generics.where_clause.has_where_token,
             )?;
-            VariantData::Struct { fields, recovered }
+            VariantData::Struct { fields, recovered: recovered.into() }
         } else if self.token == token::OpenDelim(Delimiter::Brace) {
             let (fields, recovered) = self.parse_record_struct_body(
                 "union",
                 class_name.span,
                 generics.where_clause.has_where_token,
             )?;
-            VariantData::Struct { fields, recovered }
+            VariantData::Struct { fields, recovered: recovered.into() }
         } else {
             let token_str = super::token_descr(&self.token);
             let msg = format!("expected `where` or `{{` after union name, found {token_str}");
@@ -1670,14 +1692,14 @@ impl<'a> Parser<'a> {
         adt_ty: &str,
         ident_span: Span,
         parsed_where: bool,
-    ) -> PResult<'a, (ThinVec<FieldDef>, /* recovered */ bool)> {
+    ) -> PResult<'a, (ThinVec<FieldDef>, Recovered)> {
         let mut fields = ThinVec::new();
-        let mut recovered = false;
+        let mut recovered = Recovered::No;
         if self.eat(&token::OpenDelim(Delimiter::Brace)) {
             while self.token != token::CloseDelim(Delimiter::Brace) {
                 let field = self.parse_field_def(adt_ty).map_err(|e| {
                     self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
-                    recovered = true;
+                    recovered = Recovered::Yes;
                     e
                 });
                 match field {
@@ -1876,7 +1898,7 @@ impl<'a> Parser<'a> {
 
     fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> {
         if let Err(err) = self.expect(&token::Colon) {
-            let sm = self.sess.source_map();
+            let sm = self.psess.source_map();
             let eq_typo = self.token.kind == token::Eq && self.look_ahead(1, |t| t.is_path_start());
             let semi_typo = self.token.kind == token::Semi
                 && self.look_ahead(1, |t| {
@@ -1917,7 +1939,7 @@ impl<'a> Parser<'a> {
         if self.token.kind == token::Not {
             if let Err(mut err) = self.unexpected::<FieldDef>() {
                 // Encounter the macro invocation
-                err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
+                err.subdiagnostic(self.dcx(), MacroExpandsToAdtField { adt_ty });
                 return Err(err);
             }
         }
@@ -1948,8 +1970,8 @@ impl<'a> Parser<'a> {
     fn parse_field_ident(&mut self, adt_ty: &str, lo: Span) -> PResult<'a, Ident> {
         let (ident, is_raw) = self.ident_or_err(true)?;
         if ident.name == kw::Underscore {
-            self.sess.gated_spans.gate(sym::unnamed_fields, lo);
-        } else if !is_raw && ident.is_reserved() {
+            self.psess.gated_spans.gate(sym::unnamed_fields, lo);
+        } else if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
             let snapshot = self.create_snapshot_for_diagnostic();
             let err = if self.check_fn_front_matter(false, Case::Sensitive) {
                 let inherited_vis = Visibility {
@@ -2058,7 +2080,7 @@ impl<'a> Parser<'a> {
             return self.unexpected();
         };
 
-        self.sess.gated_spans.gate(sym::decl_macro, lo.to(self.prev_token.span));
+        self.psess.gated_spans.gate(sym::decl_macro, lo.to(self.prev_token.span));
         Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: false })))
     }
 
@@ -2311,11 +2333,11 @@ impl<'a> Parser<'a> {
             let _ = self.parse_expr()?;
             self.expect_semi()?; // `;`
             let span = eq_sp.to(self.prev_token.span);
-            self.dcx().emit_err(errors::FunctionBodyEqualsExpr {
+            let guar = self.dcx().emit_err(errors::FunctionBodyEqualsExpr {
                 span,
                 sugg: errors::FunctionBodyEqualsExprSugg { eq: eq_sp, semi: self.prev_token.span },
             });
-            (AttrVec::new(), Some(self.mk_block_err(span)))
+            (AttrVec::new(), Some(self.mk_block_err(span, guar)))
         } else {
             let expected = if req_body {
                 &[token::OpenDelim(Delimiter::Brace)][..]
@@ -2336,10 +2358,13 @@ impl<'a> Parser<'a> {
                             .into_iter()
                             .any(|s| self.prev_token.is_ident_named(s));
 
-                        err.subdiagnostic(errors::FnTraitMissingParen {
-                            span: self.prev_token.span,
-                            machine_applicable,
-                        });
+                        err.subdiagnostic(
+                            self.dcx(),
+                            errors::FnTraitMissingParen {
+                                span: self.prev_token.span,
+                                machine_applicable,
+                            },
+                        );
                     }
                     return Err(err);
                 }
@@ -2435,7 +2460,7 @@ impl<'a> Parser<'a> {
 
         match coroutine_kind {
             Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => {
-                self.sess.gated_spans.gate(sym::gen_blocks, span);
+                self.psess.gated_spans.gate(sym::gen_blocks, span);
             }
             Some(CoroutineKind::Async { .. }) | None => {}
         }
@@ -2445,8 +2470,8 @@ impl<'a> Parser<'a> {
             // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
             // account for this.
             match self.expect_one_of(&[], &[]) {
-                Ok(true) => {}
-                Ok(false) => unreachable!(),
+                Ok(Recovered::Yes) => {}
+                Ok(Recovered::No) => unreachable!(),
                 Err(mut err) => {
                     // Qualifier keywords ordering check
                     enum WrongKw {
@@ -2628,13 +2653,13 @@ impl<'a> Parser<'a> {
             p.recover_diff_marker();
             let snapshot = p.create_snapshot_for_diagnostic();
             let param = p.parse_param_general(req_name, first_param).or_else(|e| {
-                e.emit();
+                let guar = e.emit();
                 let lo = p.prev_token.span;
                 p.restore_snapshot(snapshot);
                 // Skip every token until next possible arg or end.
                 p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
                 // Create a placeholder argument for proper arg count (issue #34264).
-                Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span))))
+                Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span)), guar))
             });
             // ...now that we've parsed the first argument, `self` is no longer allowed.
             first_param = false;
@@ -2671,8 +2696,8 @@ impl<'a> Parser<'a> {
                     return if let Some(ident) =
                         this.parameter_without_type(&mut err, pat, is_name_required, first_param)
                     {
-                        err.emit();
-                        Ok((dummy_arg(ident), TrailingToken::None))
+                        let guar = err.emit();
+                        Ok((dummy_arg(ident, guar), TrailingToken::None))
                     } else {
                         Err(err)
                     };
@@ -2724,7 +2749,7 @@ impl<'a> Parser<'a> {
     fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
         // Extract an identifier *after* having confirmed that the token is one.
         let expect_self_ident = |this: &mut Self| match this.token.ident() {
-            Some((ident, false)) => {
+            Some((ident, IdentIsRaw::No)) => {
                 this.bump();
                 ident
             }
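
One refactor in this item.rs diff replaces the positional `TyAliasWhereClause(bool, Span)` tuples and the separately tracked `where_predicates_split` index with a single `TyAliasWhereClauses` struct whose fields are named `before`, `after`, and `split`. A standalone sketch of the shape; `WhereClausePosition` and the `(u32, u32)` span are made-up stand-ins, not the real AST types.

    struct WhereClausePosition {
        has_where_token: bool,
        span: (u32, u32), // stand-in for rustc's `Span`
    }

    struct TyAliasWhereClauses {
        before: WhereClausePosition,
        after: WhereClausePosition,
        // How many predicates belong to the `before` clause, so the two
        // clauses can be reprinted in their original positions.
        split: usize,
    }

    fn main() {
        let wc = TyAliasWhereClauses {
            before: WhereClausePosition { has_where_token: true, span: (10, 24) },
            after: WhereClausePosition { has_where_token: false, span: (30, 30) },
            split: 2,
        };
        // Named fields replace `where_clauses.0.1`-style accesses and the
        // free-floating split index.
        println!(
            "split = {}, before: where={} span={:?}, after: where={} span={:?}",
            wc.split,
            wc.before.has_where_token,
            wc.before.span,
            wc.after.has_where_token,
            wc.after.span,
        );
    }
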
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 623407eb380..12879dc429b 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -11,6 +11,7 @@ mod stmt;
 mod ty;
 
 use crate::lexer::UnmatchedDelim;
+use ast::token::IdentIsRaw;
 pub use attr_wrapper::AttrWrapper;
 pub use diagnostics::AttemptLocalParseRecovery;
 pub(crate) use expr::ForbiddenLetReason;
@@ -32,7 +33,7 @@ use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::PResult;
-use rustc_errors::{Applicability, DiagnosticBuilder, FatalError, MultiSpan};
+use rustc_errors::{Applicability, Diag, FatalError, MultiSpan};
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::{Span, DUMMY_SP};
@@ -127,7 +128,7 @@ pub enum Recovery {
 
 #[derive(Clone)]
 pub struct Parser<'a> {
-    pub sess: &'a ParseSess,
+    pub psess: &'a ParseSess,
     /// The current token.
     pub token: Token,
     /// The spacing for the current token.
@@ -357,6 +358,25 @@ pub enum FollowedByType {
     No,
 }
 
+/// Whether a function performed recovery.
+#[derive(Copy, Clone, Debug)]
+pub enum Recovered {
+    No,
+    Yes,
+}
+
+impl From<Recovered> for bool {
+    fn from(r: Recovered) -> bool {
+        matches!(r, Recovered::Yes)
+    }
+}
+
+#[derive(Copy, Clone, Debug)]
+pub enum Trailing {
+    No,
+    Yes,
+}
+
 #[derive(Clone, Copy, PartialEq, Eq)]
 pub enum TokenDescription {
     ReservedIdentifier,
@@ -394,12 +414,12 @@ pub(super) fn token_descr(token: &Token) -> String {
 
 impl<'a> Parser<'a> {
     pub fn new(
-        sess: &'a ParseSess,
+        psess: &'a ParseSess,
         stream: TokenStream,
         subparser_name: Option<&'static str>,
     ) -> Self {
         let mut parser = Parser {
-            sess,
+            psess,
             token: Token::dummy(),
             token_spacing: Spacing::Alone,
             prev_token: Token::dummy(),
@@ -455,11 +475,11 @@ impl<'a> Parser<'a> {
     }
 
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
+    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
-                Ok(false)
+                Ok(Recovered::No)
             } else {
                 self.unexpected_try_recover(t)
             }
@@ -475,13 +495,13 @@ impl<'a> Parser<'a> {
         &mut self,
         edible: &[TokenKind],
         inedible: &[TokenKind],
-    ) -> PResult<'a, bool /* recovered */> {
+    ) -> PResult<'a, Recovered> {
         if edible.contains(&self.token.kind) {
             self.bump();
-            Ok(false)
+            Ok(Recovered::No)
         } else if inedible.contains(&self.token.kind) {
             // leave it in the input
-            Ok(false)
+            Ok(Recovered::No)
         } else if self.token.kind != token::Eof
             && self.last_unexpected_token_span == Some(self.token.span)
         {
@@ -499,7 +519,7 @@ impl<'a> Parser<'a> {
     fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
         let (ident, is_raw) = self.ident_or_err(recover)?;
 
-        if !is_raw && ident.is_reserved() {
+        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
             let err = self.expected_ident_found_err();
             if recover {
                 err.emit();
@@ -511,7 +531,7 @@ impl<'a> Parser<'a> {
         Ok(ident)
     }
 
-    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> {
+    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
         match self.token.ident() {
             Some(ident) => Ok(ident),
             None => self.expected_ident_found(recover),
@@ -568,7 +588,7 @@ impl<'a> Parser<'a> {
         }
 
         if case == Case::Insensitive
-            && let Some((ident, /* is_raw */ false)) = self.token.ident()
+            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
             && ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
         {
             true
@@ -598,7 +618,7 @@ impl<'a> Parser<'a> {
         }
 
         if case == Case::Insensitive
-            && let Some((ident, /* is_raw */ false)) = self.token.ident()
+            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
             && ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
         {
             self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
@@ -694,7 +714,7 @@ impl<'a> Parser<'a> {
         }
         match self.token.kind.break_two_token_op() {
             Some((first, second)) if first == expected => {
-                let first_span = self.sess.source_map().start_point(self.token.span);
+                let first_span = self.psess.source_map().start_point(self.token.span);
                 let second_span = self.token.span.with_lo(first_span.hi());
                 self.token = Token::new(first, first_span);
                 // Keep track of this token - if we end token capturing now,
@@ -783,10 +803,10 @@ impl<'a> Parser<'a> {
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         let mut first = true;
-        let mut recovered = false;
-        let mut trailing = false;
+        let mut recovered = Recovered::No;
+        let mut trailing = Trailing::No;
         let mut v = ThinVec::new();
 
         while !self.expect_any_with_type(kets, expect) {
@@ -800,12 +820,12 @@ impl<'a> Parser<'a> {
                 } else {
                     // check for separator
                     match self.expect(t) {
-                        Ok(false) /* not recovered */ => {
+                        Ok(Recovered::No) => {
                             self.current_closure.take();
                         }
-                        Ok(true) /* recovered */ => {
+                        Ok(Recovered::Yes) => {
                             self.current_closure.take();
-                            recovered = true;
+                            recovered = Recovered::Yes;
                             break;
                         }
                         Err(mut expect_err) => {
@@ -900,7 +920,7 @@ impl<'a> Parser<'a> {
                 }
             }
             if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
-                trailing = true;
+                trailing = Trailing::Yes;
                 break;
             }
 
@@ -914,7 +934,7 @@ impl<'a> Parser<'a> {
     fn recover_missing_braces_around_closure_body(
         &mut self,
         closure_spans: ClosureSpans,
-        mut expect_err: DiagnosticBuilder<'_>,
+        mut expect_err: Diag<'_>,
     ) -> PResult<'a, ()> {
         let initial_semicolon = self.token.span;
 
@@ -978,7 +998,7 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
     }
 
@@ -990,9 +1010,9 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
-        if !recovered {
+        if matches!(recovered, Recovered::No) {
             self.eat(ket);
         }
         Ok((val, trailing))
@@ -1007,7 +1027,7 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.expect(bra)?;
         self.parse_seq_to_end(ket, sep, f)
     }
@@ -1019,7 +1039,7 @@ impl<'a> Parser<'a> {
         &mut self,
         delim: Delimiter,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.parse_unspanned_seq(
             &token::OpenDelim(delim),
             &token::CloseDelim(delim),
@@ -1034,7 +1054,7 @@ impl<'a> Parser<'a> {
     fn parse_paren_comma_seq<T>(
         &mut self,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
     }
 
@@ -1193,7 +1213,7 @@ impl<'a> Parser<'a> {
     fn parse_closure_constness(&mut self) -> Const {
         let constness = self.parse_constness_(Case::Sensitive, true);
         if let Const::Yes(span) = constness {
-            self.sess.gated_spans.gate(sym::const_closures, span);
+            self.psess.gated_spans.gate(sym::const_closures, span);
         }
         constness
     }
@@ -1214,9 +1234,9 @@ impl<'a> Parser<'a> {
     /// Parses inline const expressions.
     fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
         if pat {
-            self.sess.gated_spans.gate(sym::inline_const_pat, span);
+            self.psess.gated_spans.gate(sym::inline_const_pat, span);
         } else {
-            self.sess.gated_spans.gate(sym::inline_const, span);
+            self.psess.gated_spans.gate(sym::inline_const, span);
         }
         self.eat_keyword(kw::Const);
         let (attrs, blk) = self.parse_inner_attrs_and_block()?;
@@ -1459,7 +1479,7 @@ impl<'a> Parser<'a> {
         match self.parse_str_lit() {
             Ok(str_lit) => Some(str_lit),
             Err(Some(lit)) => match lit.kind {
-                ast::LitKind::Err => None,
+                ast::LitKind::Err(_) => None,
                 _ => {
                     self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
                     None
@@ -1501,8 +1521,8 @@ impl<'a> Parser<'a> {
 
 pub(crate) fn make_unclosed_delims_error(
     unmatched: UnmatchedDelim,
-    sess: &ParseSess,
-) -> Option<DiagnosticBuilder<'_>> {
+    psess: &ParseSess,
+) -> Option<Diag<'_>> {
     // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
     // `unmatched_delims` only for error recovery in the `Parser`.
     let found_delim = unmatched.found_delim?;
@@ -1510,7 +1530,7 @@ pub(crate) fn make_unclosed_delims_error(
     if let Some(sp) = unmatched.unclosed_span {
         spans.push(sp);
     };
-    let err = sess.dcx.create_err(MismatchedClosingDelimiter {
+    let err = psess.dcx.create_err(MismatchedClosingDelimiter {
         spans,
         delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
         unmatched: unmatched.found_span,
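
The `Recovered` and `Trailing` enums added above replace the `bool /* recovered */` and `bool /* trailing */` tuple elements that previously needed inline comments to be readable, and the `From<Recovered> for bool` impl lets remaining bool-based consumers keep calling `.into()`. A standalone sketch of the idea: the enum definitions mirror the ones added in this hunk, but `parse_seq` below is a made-up stand-in for the real sequence parser.

    #[derive(Copy, Clone, Debug)]
    enum Recovered { No, Yes }

    #[derive(Copy, Clone, Debug)]
    enum Trailing { No, Yes }

    impl From<Recovered> for bool {
        fn from(r: Recovered) -> bool {
            matches!(r, Recovered::Yes)
        }
    }

    // Hypothetical comma-separated sequence parser: reports whether the input
    // had a trailing separator and whether it recovered from an empty element.
    fn parse_seq(input: &str) -> (Vec<String>, Trailing, Recovered) {
        let trailing = if input.ends_with(',') { Trailing::Yes } else { Trailing::No };
        let mut recovered = Recovered::No;
        let items = input
            .split_terminator(',')
            .map(|s| {
                if s.trim().is_empty() {
                    recovered = Recovered::Yes;
                    "<error>".to_string()
                } else {
                    s.trim().to_string()
                }
            })
            .collect();
        (items, trailing, recovered)
    }

    fn main() {
        // `Trailing::Yes` / `Recovered::No` read better at call sites than bare bools.
        let (items, trailing, recovered) = parse_seq("a, , b,");
        println!("{items:?} trailing={trailing:?} recovered={recovered:?}");
        let as_bool: bool = recovered.into(); // old bool-based callers keep working
        assert!(as_bool);
    }
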
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 071d6b72f3b..f1572a18a8b 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -201,6 +201,6 @@ impl<'a> Parser<'a> {
 
 /// The token is an identifier, but not `_`.
 /// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
+fn get_macro_ident(token: &Token) -> Option<(Ident, token::IdentIsRaw)> {
     token.ident().filter(|(ident, _)| ident.name != kw::Underscore)
 }
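
`get_macro_ident` is one of many signatures in this patch where a bare `bool` meaning "is this a raw identifier?" becomes the named `IdentIsRaw` enum, so call sites match on `IdentIsRaw::No` / `IdentIsRaw::Yes` instead of an unexplained `false`/`true`. A standalone sketch of the difference at a call site; `classify` and its `r#` handling are illustrative, not the real lexer.

    #[derive(Clone, Copy, Debug)]
    enum IdentIsRaw { No, Yes }

    // Stand-in for `token.ident()`: strips an `r#` prefix and reports rawness.
    fn classify(name: &str) -> (String, IdentIsRaw) {
        match name.strip_prefix("r#") {
            Some(rest) => (rest.to_string(), IdentIsRaw::Yes),
            None => (name.to_string(), IdentIsRaw::No),
        }
    }

    fn main() {
        let (ident, is_raw) = classify("r#fn");
        // Compare with the old shape: `if !is_raw && ident == "fn"` said nothing
        // about what the bool meant.
        if matches!(is_raw, IdentIsRaw::No) && ident == "fn" {
            println!("reserved word used as an identifier without `r#`");
        } else {
            println!("`{ident}` is fine (raw: {is_raw:?})");
        }
    }
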
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 12260ec95a5..88ae1b5420f 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -1,4 +1,4 @@
-use super::{ForceCollect, Parser, PathStyle, Restrictions, TrailingToken};
+use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, TrailingToken};
 use crate::errors::{
     self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
     DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
@@ -20,7 +20,7 @@ use rustc_ast::{
     PatField, PatFieldsRest, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
 };
 use rustc_ast_pretty::pprust;
-use rustc_errors::{Applicability, DiagnosticBuilder, PResult};
+use rustc_errors::{Applicability, Diag, PResult};
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::{respan, Spanned};
 use rustc_span::symbol::{kw, sym, Ident};
@@ -311,7 +311,7 @@ impl<'a> Parser<'a> {
             matches!(
                 &token.uninterpolate().kind,
                 token::FatArrow // e.g. `a | => 0,`.
-                | token::Ident(kw::If, false) // e.g. `a | if expr`.
+                | token::Ident(kw::If, token::IdentIsRaw::No) // e.g. `a | if expr`.
                 | token::Eq // e.g. `let a | = 0`.
                 | token::Semi // e.g. `let a |;`.
                 | token::Colon // e.g. `let a | :`.
@@ -388,7 +388,7 @@ impl<'a> Parser<'a> {
         // Parse `?`, `.f`, `(arg0, arg1, ...)` or `[expr]` until they've all been eaten.
         if let Ok(expr) = snapshot
             .parse_expr_dot_or_call_with(
-                self.mk_expr_err(pat_span), // equivalent to transforming the parsed pattern into an `Expr`
+                self.mk_expr(pat_span, ExprKind::Dummy), // equivalent to transforming the parsed pattern into an `Expr`
                 pat_span,
                 AttrVec::new(),
             )
@@ -470,7 +470,7 @@ impl<'a> Parser<'a> {
             self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
         } else if self.eat(&token::Not) {
             // Parse `!`
-            self.sess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
+            self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
             PatKind::Never
         } else if self.eat_keyword(kw::Underscore) {
             // Parse `_`
@@ -566,7 +566,7 @@ impl<'a> Parser<'a> {
             match self.parse_literal_maybe_minus() {
                 Ok(begin) => {
                     let begin = match self.maybe_recover_trailing_expr(begin.span, false) {
-                        Some(_) => self.mk_expr_err(begin.span),
+                        Some(guar) => self.mk_expr_err(begin.span, guar),
                         None => begin,
                     };
 
@@ -696,7 +696,9 @@ impl<'a> Parser<'a> {
 
         // Here, `(pat,)` is a tuple pattern.
         // For backward compatibility, `(..)` is a tuple pattern as well.
-        Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
+        let paren_pattern =
+            fields.len() == 1 && !(matches!(trailing_comma, Trailing::Yes) || fields[0].is_rest());
+        if paren_pattern {
             let pat = fields.into_iter().next().unwrap();
             let close_paren = self.prev_token.span;
 
@@ -714,10 +716,10 @@ impl<'a> Parser<'a> {
                         },
                     });
 
-                    self.parse_pat_range_begin_with(begin.clone(), form)?
+                    self.parse_pat_range_begin_with(begin.clone(), form)
                 }
                 // recover ranges with parentheses around the `(start)..`
-                PatKind::Err(_)
+                PatKind::Err(guar)
                     if self.may_recover()
                         && let Some(form) = self.parse_range_end() =>
                 {
@@ -729,15 +731,15 @@ impl<'a> Parser<'a> {
                         },
                     });
 
-                    self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)?
+                    self.parse_pat_range_begin_with(self.mk_expr_err(pat.span, *guar), form)
                 }
 
                 // (pat) with optional parentheses
-                _ => PatKind::Paren(pat),
+                _ => Ok(PatKind::Paren(pat)),
             }
         } else {
-            PatKind::Tuple(fields)
-        })
+            Ok(PatKind::Tuple(fields))
+        }
     }
 
     /// Parse a mutable binding with the `mut` token already eaten.
@@ -830,7 +832,7 @@ impl<'a> Parser<'a> {
 
     fn fatal_unexpected_non_pat(
         &mut self,
-        err: DiagnosticBuilder<'a>,
+        err: Diag<'a>,
         expected: Option<Expected>,
     ) -> PResult<'a, P<Pat>> {
         err.cancel();
@@ -841,9 +843,9 @@ impl<'a> Parser<'a> {
         let mut err = self.dcx().struct_span_err(self.token.span, msg);
         err.span_label(self.token.span, format!("expected {expected}"));
 
-        let sp = self.sess.source_map().start_point(self.token.span);
-        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
+        let sp = self.psess.source_map().start_point(self.token.span);
+        if let Some(sp) = self.psess.ambiguous_block_expr_parse.borrow().get(&sp) {
+            err.subdiagnostic(self.dcx(), ExprParenthesesNeeded::surrounding(*sp));
         }
 
         Err(err)
@@ -884,7 +886,7 @@ impl<'a> Parser<'a> {
         Ok(PatKind::Range(Some(begin), end, re))
     }
 
-    pub(super) fn inclusive_range_with_incorrect_end(&mut self) {
+    pub(super) fn inclusive_range_with_incorrect_end(&mut self) -> ErrorGuaranteed {
         let tok = &self.token;
         let span = self.prev_token.span;
         // If the user typed "..==" instead of "..=", we want to give them
@@ -903,15 +905,13 @@ impl<'a> Parser<'a> {
                     let _ = self.parse_pat_range_end().map_err(|e| e.cancel());
                 }
 
-                self.dcx().emit_err(InclusiveRangeExtraEquals { span: span_with_eq });
+                self.dcx().emit_err(InclusiveRangeExtraEquals { span: span_with_eq })
             }
             token::Gt if no_space => {
                 let after_pat = span.with_hi(span.hi() - rustc_span::BytePos(1)).shrink_to_hi();
-                self.dcx().emit_err(InclusiveRangeMatchArrow { span, arrow: tok.span, after_pat });
-            }
-            _ => {
-                self.dcx().emit_err(InclusiveRangeNoEnd { span });
+                self.dcx().emit_err(InclusiveRangeMatchArrow { span, arrow: tok.span, after_pat })
             }
+            _ => self.dcx().emit_err(InclusiveRangeNoEnd { span }),
         }
     }
 
@@ -985,7 +985,7 @@ impl<'a> Parser<'a> {
         }
 
         Ok(match recovered {
-            Some(_) => self.mk_expr_err(bound.span),
+            Some(guar) => self.mk_expr_err(bound.span, guar),
             None => bound,
         })
     }
@@ -1067,7 +1067,7 @@ impl<'a> Parser<'a> {
     fn parse_pat_struct(&mut self, qself: Option<P<QSelf>>, path: Path) -> PResult<'a, PatKind> {
         if qself.is_some() {
             // Feature gate the use of qualified paths in patterns
-            self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
+            self.psess.gated_spans.gate(sym::more_qualified_paths, path.span);
         }
         self.bump();
         let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
@@ -1096,7 +1096,7 @@ impl<'a> Parser<'a> {
             )
         })?;
         if qself.is_some() {
-            self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
+            self.psess.gated_spans.gate(sym::more_qualified_paths, path.span);
         }
         Ok(PatKind::TupleStruct(qself, path, fields))
     }
@@ -1143,7 +1143,7 @@ impl<'a> Parser<'a> {
             Ok(PatKind::Ident(BindingAnnotation::NONE, Ident::new(kw::Box, box_span), sub))
         } else {
             let pat = self.parse_pat_with_range_pat(false, None, None)?;
-            self.sess.gated_spans.gate(sym::box_patterns, box_span.to(self.prev_token.span));
+            self.psess.gated_spans.gate(sym::box_patterns, box_span.to(self.prev_token.span));
             Ok(PatKind::Box(pat))
         }
     }
@@ -1153,7 +1153,7 @@ impl<'a> Parser<'a> {
         let mut fields = ThinVec::new();
         let mut etc = PatFieldsRest::None;
         let mut ate_comma = true;
-        let mut delayed_err: Option<DiagnosticBuilder<'a>> = None;
+        let mut delayed_err: Option<Diag<'a>> = None;
         let mut first_etc_and_maybe_comma_span = None;
         let mut last_non_comma_dotdot_span = None;
 
@@ -1192,15 +1192,15 @@ impl<'a> Parser<'a> {
                         .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None })
                     {
                         let nw_span = self
-                            .sess
+                            .psess
                             .source_map()
                             .span_extend_to_line(comma_tok.span)
                             .trim_start(comma_tok.span.shrink_to_lo())
-                            .map(|s| self.sess.source_map().span_until_non_whitespace(s));
+                            .map(|s| self.psess.source_map().span_until_non_whitespace(s));
                         first_etc_and_maybe_comma_span = nw_span.map(|s| etc_sp.to(s));
                     } else {
                         first_etc_and_maybe_comma_span =
-                            Some(self.sess.source_map().span_until_non_whitespace(etc_sp));
+                            Some(self.psess.source_map().span_until_non_whitespace(etc_sp));
                     }
                 }
 
@@ -1218,7 +1218,8 @@ impl<'a> Parser<'a> {
                 let mut comma_sp = None;
                 if self.token == token::Comma {
                     // Issue #49257
-                    let nw_span = self.sess.source_map().span_until_non_whitespace(self.token.span);
+                    let nw_span =
+                        self.psess.source_map().span_until_non_whitespace(self.token.span);
                     etc_sp = etc_sp.to(nw_span);
                     err.span_label(
                         etc_sp,
@@ -1316,11 +1317,7 @@ impl<'a> Parser<'a> {
 
     /// If the user writes `S { ref field: name }` instead of `S { field: ref name }`, we suggest
     /// the correct code.
-    fn recover_misplaced_pattern_modifiers(
-        &self,
-        fields: &ThinVec<PatField>,
-        err: &mut DiagnosticBuilder<'a>,
-    ) {
+    fn recover_misplaced_pattern_modifiers(&self, fields: &ThinVec<PatField>, err: &mut Diag<'a>) {
         if let Some(last) = fields.iter().last()
             && last.is_shorthand
             && let PatKind::Ident(binding, ident, None) = last.pat.kind
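
In pat.rs, `inclusive_range_with_incorrect_end` now returns the `ErrorGuaranteed` produced by whichever `emit_err` call fires, by letting the `match` itself be the function's value instead of discarding each emission. A standalone sketch of that shape; `RangeEnd`, `emit_err`, and the messages are illustrative stand-ins.

    struct ErrorGuaranteed; // stand-in for the proof-of-emitted-error token

    fn emit_err(msg: &str) -> ErrorGuaranteed {
        eprintln!("error: {msg}");
        ErrorGuaranteed
    }

    enum RangeEnd<'a> {
        ExtraEquals,
        MatchArrow,
        Other(&'a str),
    }

    // The whole `match` is the return value, so no arm can forget to produce
    // (and therefore emit) an error.
    fn inclusive_range_with_incorrect_end(end: RangeEnd<'_>) -> ErrorGuaranteed {
        match end {
            RangeEnd::ExtraEquals => emit_err("unexpected `=` after inclusive range"),
            RangeEnd::MatchArrow => emit_err("unexpected `>` after inclusive range"),
            RangeEnd::Other(tok) => emit_err(&format!("inclusive range with no end, found `{tok}`")),
        }
    }

    fn main() {
        for end in [RangeEnd::ExtraEquals, RangeEnd::MatchArrow, RangeEnd::Other("}")] {
            let _guar: ErrorGuaranteed = inclusive_range_with_incorrect_end(end);
        }
    }
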
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index e7cad74b4dd..545db5138a3 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -2,6 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{Parser, Restrictions, TokenType};
 use crate::errors::PathSingleColon;
 use crate::{errors, maybe_whole};
+use ast::token::IdentIsRaw;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::{
@@ -249,7 +250,7 @@ impl<'a> Parser<'a> {
                         self.dcx().emit_err(PathSingleColon {
                             span: self.prev_token.span,
                             type_ascription: self
-                                .sess
+                                .psess
                                 .unstable_features
                                 .is_nightly_build()
                                 .then_some(()),
@@ -321,7 +322,7 @@ impl<'a> Parser<'a> {
                             err = self.dcx().create_err(PathSingleColon {
                                 span: self.token.span,
                                 type_ascription: self
-                                    .sess
+                                    .psess
                                     .unstable_features
                                     .is_nightly_build()
                                     .then_some(()),
@@ -390,7 +391,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
         match self.token.ident() {
-            Some((ident, false)) if ident.is_path_segment_keyword() => {
+            Some((ident, IdentIsRaw::No)) if ident.is_path_segment_keyword() => {
                 self.bump();
                 Ok(ident)
             }
@@ -637,9 +638,9 @@ impl<'a> Parser<'a> {
                             && args.inputs.is_empty()
                             && matches!(args.output, ast::FnRetTy::Default(..))
                         {
-                            self.sess.gated_spans.gate(sym::return_type_notation, span);
+                            self.psess.gated_spans.gate(sym::return_type_notation, span);
                         } else {
-                            self.sess.gated_spans.gate(sym::associated_type_bounds, span);
+                            self.psess.gated_spans.gate(sym::associated_type_bounds, span);
                         }
                     }
                     let constraint =
@@ -674,12 +675,13 @@ impl<'a> Parser<'a> {
         let term = match arg {
             Some(GenericArg::Type(ty)) => ty.into(),
             Some(GenericArg::Const(c)) => {
-                self.sess.gated_spans.gate(sym::associated_const_equality, span);
+                self.psess.gated_spans.gate(sym::associated_const_equality, span);
                 c.into()
             }
             Some(GenericArg::Lifetime(lt)) => {
-                self.dcx().emit_err(errors::AssocLifetime { span, lifetime: lt.ident.span });
-                self.mk_ty(span, ast::TyKind::Err).into()
+                let guar =
+                    self.dcx().emit_err(errors::AssocLifetime { span, lifetime: lt.ident.span });
+                self.mk_ty(span, ast::TyKind::Err(guar)).into()
             }
             None => {
                 let after_eq = eq.shrink_to_hi();
@@ -689,7 +691,7 @@ impl<'a> Parser<'a> {
                     .struct_span_err(after_eq.to(before_next), "missing type to the right of `=`");
                 if matches!(self.token.kind, token::Comma | token::Gt) {
                     err.span_suggestion(
-                        self.sess.source_map().next_point(eq).to(before_next),
+                        self.psess.source_map().next_point(eq).to(before_next),
                         "to constrain the associated type, add a type after `=`",
                         " TheType",
                         Applicability::HasPlaceholders,
@@ -779,7 +781,7 @@ impl<'a> Parser<'a> {
                     // type to determine if error recovery has occurred and if the input is not a
                     // syntactically valid type after all.
                     if let ast::TyKind::Slice(inner_ty) | ast::TyKind::Array(inner_ty, _) = &ty.kind
-                        && let ast::TyKind::Err = inner_ty.kind
+                        && let ast::TyKind::Err(_) = inner_ty.kind
                         && let Some(snapshot) = snapshot
                         && let Some(expr) =
                             self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
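
Because `TyKind::Err` now carries an `ErrorGuaranteed` payload, existing unit-variant patterns such as `let ast::TyKind::Err = inner_ty.kind` become `TyKind::Err(_)`, or bind the payload when it is reused (the same adjustment appears for `ast::LitKind::Err(_)` in mod.rs above). A standalone sketch of that adjustment with illustrative types, not the real AST.

    struct ErrorGuaranteed; // stand-in proof token

    enum TyKind {
        Slice(Box<TyKind>),
        Infer,
        Err(ErrorGuaranteed),
    }

    // Before the payload existed this pattern read `TyKind::Err`; now it needs
    // `TyKind::Err(_)` (or `TyKind::Err(guar)` to reuse the guarantee).
    fn is_recovered_slice(ty: &TyKind) -> bool {
        matches!(ty, TyKind::Slice(inner) if matches!(**inner, TyKind::Err(_)))
    }

    fn main() {
        let broken = TyKind::Slice(Box::new(TyKind::Err(ErrorGuaranteed)));
        let fine = TyKind::Slice(Box::new(TyKind::Infer));
        println!("{} {}", is_recovered_slice(&broken), is_recovered_slice(&fine));
    }
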
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 1bae5b32240..54854cd2da9 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -11,6 +11,7 @@ use crate::errors;
 use crate::maybe_whole;
 
 use crate::errors::MalformedLoopLabel;
+use crate::parser::Recovered;
 use ast::Label;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
@@ -19,9 +20,9 @@ use rustc_ast::util::classify;
 use rustc_ast::{AttrStyle, AttrVec, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
 use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
 use rustc_ast::{StmtKind, DUMMY_NODE_ID};
-use rustc_errors::{Applicability, DiagnosticBuilder, PResult};
+use rustc_errors::{Applicability, Diag, PResult};
 use rustc_span::symbol::{kw, sym, Ident};
-use rustc_span::{BytePos, Span};
+use rustc_span::{BytePos, ErrorGuaranteed, Span};
 
 use std::borrow::Cow;
 use std::mem;
@@ -229,8 +230,7 @@ impl<'a> Parser<'a> {
     /// Also error if the previous token was a doc comment.
     fn error_outer_attrs(&self, attrs: AttrWrapper) {
         if !attrs.is_empty()
-            && let attrs = attrs.take_for_recovery(self.sess)
-            && let attrs @ [.., last] = &*attrs
+            && let attrs @ [.., last] = &*attrs.take_for_recovery(self.psess)
         {
             if last.is_doc_comment() {
                 self.dcx().emit_err(errors::DocCommentDoesNotDocumentAnything {
@@ -294,17 +294,22 @@ impl<'a> Parser<'a> {
         let (pat, colon) =
             self.parse_pat_before_ty(None, RecoverComma::Yes, PatternLocation::LetBinding)?;
 
-        let (err, ty) = if colon {
+        let (err, ty, colon_sp) = if colon {
             // Save the state of the parser before parsing type normally, in case there is a `:`
             // instead of an `=` typo.
             let parser_snapshot_before_type = self.clone();
             let colon_sp = self.prev_token.span;
             match self.parse_ty() {
-                Ok(ty) => (None, Some(ty)),
+                Ok(ty) => (None, Some(ty), Some(colon_sp)),
                 Err(mut err) => {
-                    if let Ok(snip) = self.span_to_snippet(pat.span) {
-                        err.span_label(pat.span, format!("while parsing the type for `{snip}`"));
-                    }
+                    err.span_label(
+                        colon_sp,
+                        format!(
+                            "while parsing the type for {}",
+                            pat.descr()
+                                .map_or_else(|| "the binding".to_string(), |n| format!("`{n}`"))
+                        ),
+                    );
                     // we use noexpect here because we don't actually expect Eq to be here
                     // but we are still checking for it in order to be able to handle it if
                     // it is there
@@ -317,11 +322,11 @@ impl<'a> Parser<'a> {
                             mem::replace(self, parser_snapshot_before_type);
                         Some((parser_snapshot_after_type, colon_sp, err))
                     };
-                    (err, None)
+                    (err, None, Some(colon_sp))
                 }
             }
         } else {
-            (None, None)
+            (None, None, None)
         };
         let init = match (self.parse_initializer(err.is_some()), err) {
             (Ok(init), None) => {
@@ -380,7 +385,16 @@ impl<'a> Parser<'a> {
             }
         };
         let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span };
-        Ok(P(ast::Local { ty, pat, kind, id: DUMMY_NODE_ID, span: lo.to(hi), attrs, tokens: None }))
+        Ok(P(ast::Local {
+            ty,
+            pat,
+            kind,
+            id: DUMMY_NODE_ID,
+            span: lo.to(hi),
+            colon_sp,
+            attrs,
+            tokens: None,
+        }))
     }
 
     fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
@@ -447,10 +461,7 @@ impl<'a> Parser<'a> {
         Ok(block)
     }
 
-    fn error_block_no_opening_brace_msg(
-        &mut self,
-        msg: Cow<'static, str>,
-    ) -> DiagnosticBuilder<'a> {
+    fn error_block_no_opening_brace_msg(&mut self, msg: Cow<'static, str>) -> Diag<'a> {
         let sp = self.token.span;
         let mut e = self.dcx().struct_span_err(sp, msg);
         let do_not_suggest_help = self.token.is_keyword(kw::In) || self.token == token::Colon;
@@ -483,7 +494,7 @@ impl<'a> Parser<'a> {
             // Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836).
             Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
             Ok(Some(stmt)) => {
-                let stmt_own_line = self.sess.source_map().is_line_before_span_empty(sp);
+                let stmt_own_line = self.psess.source_map().is_line_before_span_empty(sp);
                 let stmt_span = if stmt_own_line && self.eat(&token::Semi) {
                     // Expand the span to include the semicolon.
                     stmt.span.with_hi(self.prev_token.span.hi())
@@ -602,16 +613,16 @@ impl<'a> Parser<'a> {
                                     Applicability::MaybeIncorrect,
                                 );
                             }
-                            if self.sess.unstable_features.is_nightly_build() {
+                            if self.psess.unstable_features.is_nightly_build() {
                                 // FIXME(Nilstrieb): Remove this again after a few months.
                                 err.note("type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>");
                             }
                         }
                     }
 
-                    err.emit();
+                    let guar = err.emit();
                     self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
-                    Some(self.mk_stmt_err(self.token.span))
+                    Some(self.mk_stmt_err(self.token.span, guar))
                 }
                 Ok(stmt) => stmt,
                 Err(err) => return Err(err),
@@ -650,10 +661,10 @@ impl<'a> Parser<'a> {
                         .contains(&self.token.kind) =>
             {
                 // The user has written `#[attr] expr` which is unsupported. (#106020)
-                self.attr_on_non_tail_expr(&expr);
+                let guar = self.attr_on_non_tail_expr(&expr);
                 // We already emitted an error, so don't emit another type error
                 let sp = expr.span.to(self.prev_token.span);
-                *expr = self.mk_expr_err(sp);
+                *expr = self.mk_expr_err(sp, guar);
             }
 
             // Expression without semicolon.
@@ -661,15 +672,22 @@ impl<'a> Parser<'a> {
                 if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
             {
                 // Just check for errors and recover; do not eat semicolon yet.
-                // `expect_one_of` returns PResult<'a, bool /* recovered */>
 
                 let expect_result =
                     self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
 
+                // Try to both emit a better diagnostic, and avoid further errors by replacing
+                // the `expr` with `ExprKind::Err`.
                 let replace_with_err = 'break_recover: {
                     match expect_result {
-                        // Recover from parser, skip type error to avoid extra errors.
-                        Ok(true) => true,
+                        Ok(Recovered::No) => None,
+                        Ok(Recovered::Yes) => {
+                            // Skip type error to avoid extra errors.
+                            let guar = self
+                                .dcx()
+                                .span_delayed_bug(self.prev_token.span, "expected `;` or `}`");
+                            Some(guar)
+                        }
                         Err(e) => {
                             if self.recover_colon_as_semi() {
                                 // recover_colon_as_semi has already emitted a nicer error.
@@ -677,7 +695,7 @@ impl<'a> Parser<'a> {
                                 add_semi_to_stmt = true;
                                 eat_semi = false;
 
-                                break 'break_recover false;
+                                break 'break_recover None;
                             }
 
                             match &expr.kind {
@@ -691,7 +709,7 @@ impl<'a> Parser<'a> {
                                                     token.kind,
                                                     token::Ident(
                                                         kw::For | kw::Loop | kw::While,
-                                                        false
+                                                        token::IdentIsRaw::No
                                                     ) | token::OpenDelim(Delimiter::Brace)
                                                 )
                                         })
@@ -705,13 +723,13 @@ impl<'a> Parser<'a> {
                                         };
                                         match self.parse_expr_labeled(label, false) {
                                             Ok(labeled_expr) => {
-                                                e.delay_as_bug();
+                                                e.cancel();
                                                 self.dcx().emit_err(MalformedLoopLabel {
                                                     span: label.ident.span,
                                                     correct_label: label.ident,
                                                 });
                                                 *expr = labeled_expr;
-                                                break 'break_recover false;
+                                                break 'break_recover None;
                                             }
                                             Err(err) => {
                                                 err.cancel();
@@ -723,30 +741,30 @@ impl<'a> Parser<'a> {
                                 _ => {}
                             }
 
-                            if let Err(e) =
-                                self.check_mistyped_turbofish_with_multiple_type_params(e, expr)
-                            {
-                                if recover.no() {
-                                    return Err(e);
-                                }
-                                e.emit();
-                                self.recover_stmt();
-                            }
-
-                            true
+                            let res =
+                                self.check_mistyped_turbofish_with_multiple_type_params(e, expr);
+
+                            Some(if recover.no() {
+                                res?
+                            } else {
+                                res.unwrap_or_else(|e| {
+                                    let guar = e.emit();
+                                    self.recover_stmt();
+                                    guar
+                                })
+                            })
                         }
-                        Ok(false) => false,
                     }
                 };
 
-                if replace_with_err {
+                if let Some(guar) = replace_with_err {
                     // We already emitted an error, so don't emit another type error
                     let sp = expr.span.to(self.prev_token.span);
-                    *expr = self.mk_expr_err(sp);
+                    *expr = self.mk_expr_err(sp, guar);
                 }
             }
             StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
-            StmtKind::Local(local) if let Err(e) = self.expect_semi() => {
+            StmtKind::Local(local) if let Err(mut e) = self.expect_semi() => {
                 // We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
                 match &mut local.kind {
                     LocalKind::Init(expr) | LocalKind::InitElse(expr, _) => {
@@ -754,7 +772,47 @@ impl<'a> Parser<'a> {
                         // We found `foo<bar, baz>`, have we fully recovered?
                         self.expect_semi()?;
                     }
-                    LocalKind::Decl => return Err(e),
+                    LocalKind::Decl => {
+                        if let Some(colon_sp) = local.colon_sp {
+                            e.span_label(
+                                colon_sp,
+                                format!(
+                                    "while parsing the type for {}",
+                                    local.pat.descr().map_or_else(
+                                        || "the binding".to_string(),
+                                        |n| format!("`{n}`")
+                                    )
+                                ),
+                            );
+                            let suggest_eq = if self.token.kind == token::Dot
+                                && let _ = self.bump()
+                                && let mut snapshot = self.create_snapshot_for_diagnostic()
+                                && let Ok(_) = snapshot.parse_dot_suffix_expr(
+                                    colon_sp,
+                                    self.mk_expr_err(
+                                        colon_sp,
+                                        self.dcx().delayed_bug("error during `:` -> `=` recovery"),
+                                    ),
+                                ) {
+                                true
+                            } else if let Some(op) = self.check_assoc_op()
+                                && op.node.can_continue_expr_unambiguously()
+                            {
+                                true
+                            } else {
+                                false
+                            };
+                            if suggest_eq {
+                                e.span_suggestion_short(
+                                    colon_sp,
+                                    "use `=` if you meant to assign",
+                                    "=",
+                                    Applicability::MaybeIncorrect,
+                                );
+                            }
+                        }
+                        return Err(e);
+                    }
                 }
                 eat_semi = false;
             }
@@ -791,11 +849,11 @@ impl<'a> Parser<'a> {
         Stmt { id: DUMMY_NODE_ID, kind, span }
     }
 
-    pub(super) fn mk_stmt_err(&self, span: Span) -> Stmt {
-        self.mk_stmt(span, StmtKind::Expr(self.mk_expr_err(span)))
+    pub(super) fn mk_stmt_err(&self, span: Span, guar: ErrorGuaranteed) -> Stmt {
+        self.mk_stmt(span, StmtKind::Expr(self.mk_expr_err(span, guar)))
     }
 
-    pub(super) fn mk_block_err(&self, span: Span) -> P<Block> {
-        self.mk_block(thin_vec![self.mk_stmt_err(span)], BlockCheckMode::Default, span)
+    pub(super) fn mk_block_err(&self, span: Span, guar: ErrorGuaranteed) -> P<Block> {
+        self.mk_block(thin_vec![self.mk_stmt_err(span, guar)], BlockCheckMode::Default, span)
     }
 }
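Note on the stmt.rs changes above: `expect_one_of` no longer reports recovery as a bare `bool`, and the `replace_with_err` flag is now an `Option<ErrorGuaranteed>`, so an error expression can only be substituted once a diagnostic (or delayed bug) has actually been produced. A minimal sketch of that control-flow shape, with toy stand-ins for `ErrorGuaranteed`, `Recovered`, and the parser; in the real code the `Recovered::Yes` arm files a `span_delayed_bug` rather than emitting directly.

// Toy stand-ins; in rustc these are rustc_errors::ErrorGuaranteed and the
// parser-local Recovered enum.
#[derive(Clone, Copy, Debug)]
struct ErrorGuaranteed;

enum Recovered {
    No,
    Yes,
}

#[derive(Debug)]
enum Expr {
    Lit(i64),
    // An error expression now records proof that a diagnostic was emitted.
    Err(ErrorGuaranteed),
}

fn emit_err(msg: &str) -> ErrorGuaranteed {
    eprintln!("error: {msg}");
    ErrorGuaranteed
}

// Mirrors the reworked flow: only a recovery (or an emitted error) produces a
// guarantee, and only a guarantee may overwrite `expr` with an error node.
fn check_stmt_end(recovered: Recovered, expr: &mut Expr) {
    let replace_with_err: Option<ErrorGuaranteed> = match recovered {
        Recovered::No => None,
        Recovered::Yes => Some(emit_err("expected `;` or `}`")),
    };
    if let Some(guar) = replace_with_err {
        *expr = Expr::Err(guar);
    }
}

fn main() {
    let mut e = Expr::Lit(1);
    check_stmt_end(Recovered::Yes, &mut e);
    println!("{e:?}");
}
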
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 157fb9e505a..2385c18c089 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,4 +1,4 @@
-use super::{Parser, PathStyle, TokenType};
+use super::{Parser, PathStyle, TokenType, Trailing};
 
 use crate::errors::{
     self, DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType,
@@ -346,8 +346,10 @@ impl<'a> Parser<'a> {
                 AllowCVariadic::No => {
                     // FIXME(Centril): Should we just allow `...` syntactically
                     // anywhere in a type and use semantic restrictions instead?
-                    self.dcx().emit_err(NestedCVariadicType { span: lo.to(self.prev_token.span) });
-                    TyKind::Err
+                    let guar = self
+                        .dcx()
+                        .emit_err(NestedCVariadicType { span: lo.to(self.prev_token.span) });
+                    TyKind::Err(guar)
                 }
             }
         } else {
@@ -395,7 +397,7 @@ impl<'a> Parser<'a> {
         let (fields, _recovered) =
             self.parse_record_struct_body(if is_union { "union" } else { "struct" }, lo, false)?;
         let span = lo.to(self.prev_token.span);
-        self.sess.gated_spans.gate(sym::unnamed_fields, span);
+        self.psess.gated_spans.gate(sym::unnamed_fields, span);
         let id = ast::DUMMY_NODE_ID;
         let kind =
             if is_union { TyKind::AnonUnion(id, fields) } else { TyKind::AnonStruct(id, fields) };
@@ -413,7 +415,7 @@ impl<'a> Parser<'a> {
             Ok(ty)
         })?;
 
-        if ts.len() == 1 && !trailing {
+        if ts.len() == 1 && matches!(trailing, Trailing::No) {
             let ty = ts.into_iter().next().unwrap().into_inner();
             let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus();
             match ty.kind {
@@ -493,8 +495,8 @@ impl<'a> Parser<'a> {
             {
                 // Recover from `[LIT; EXPR]` and `[LIT]`
                 self.bump();
-                err.emit();
-                self.mk_ty(self.prev_token.span, TyKind::Err)
+                let guar = err.emit();
+                self.mk_ty(self.prev_token.span, TyKind::Err(guar))
             }
             Err(err) => return Err(err),
         };
@@ -692,7 +694,7 @@ impl<'a> Parser<'a> {
 
         // parse dyn* types
         let syntax = if self.eat(&TokenKind::BinOp(token::Star)) {
-            self.sess.gated_spans.gate(sym::dyn_star, lo.to(self.prev_token.span));
+            self.psess.gated_spans.gate(sym::dyn_star, lo.to(self.prev_token.span));
             TraitObjectSyntax::DynStar
         } else {
             TraitObjectSyntax::Dyn
@@ -776,9 +778,10 @@ impl<'a> Parser<'a> {
             || self.check(&token::Not)
             || self.check(&token::Question)
             || self.check(&token::Tilde)
-            || self.check_keyword(kw::Const)
             || self.check_keyword(kw::For)
             || self.check(&token::OpenDelim(Delimiter::Parenthesis))
+            || self.check_keyword(kw::Const)
+            || self.check_keyword(kw::Async)
     }
 
     /// Parses a bound according to the grammar:
@@ -871,20 +874,22 @@ impl<'a> Parser<'a> {
             let tilde = self.prev_token.span;
             self.expect_keyword(kw::Const)?;
             let span = tilde.to(self.prev_token.span);
-            self.sess.gated_spans.gate(sym::const_trait_impl, span);
+            self.psess.gated_spans.gate(sym::const_trait_impl, span);
             BoundConstness::Maybe(span)
         } else if self.eat_keyword(kw::Const) {
-            self.sess.gated_spans.gate(sym::const_trait_impl, self.prev_token.span);
+            self.psess.gated_spans.gate(sym::const_trait_impl, self.prev_token.span);
             BoundConstness::Always(self.prev_token.span)
         } else {
             BoundConstness::Never
         };
 
-        let asyncness = if self.token.span.at_least_rust_2018() && self.eat_keyword(kw::Async) {
-            self.sess.gated_spans.gate(sym::async_closure, self.prev_token.span);
+        let asyncness = if self.token.uninterpolated_span().at_least_rust_2018()
+            && self.eat_keyword(kw::Async)
+        {
+            self.psess.gated_spans.gate(sym::async_closure, self.prev_token.span);
             BoundAsyncness::Async(self.prev_token.span)
         } else if self.may_recover()
-            && self.token.span.is_rust_2015()
+            && self.token.uninterpolated_span().is_rust_2015()
             && self.is_kw_followed_by_ident(kw::Async)
         {
             self.bump(); // eat `async`
@@ -892,7 +897,7 @@ impl<'a> Parser<'a> {
                 span: self.prev_token.span,
                 help: HelpUseLatestEdition::new(),
             });
-            self.sess.gated_spans.gate(sym::async_closure, self.prev_token.span);
+            self.psess.gated_spans.gate(sym::async_closure, self.prev_token.span);
             BoundAsyncness::Async(self.prev_token.span)
         } else {
             BoundAsyncness::Normal
@@ -901,7 +906,7 @@ impl<'a> Parser<'a> {
         let polarity = if self.eat(&token::Question) {
             BoundPolarity::Maybe(self.prev_token.span)
         } else if self.eat(&token::Not) {
-            self.sess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
+            self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
             BoundPolarity::Negative(self.prev_token.span)
         } else {
             BoundPolarity::Positive
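Note on the `ts.len() == 1 && matches!(trailing, Trailing::No)` change above: the tuple-sequence parser now returns a `Trailing` enum instead of a bool, and the trailing comma is what distinguishes the parenthesized type `(T)` from the one-element tuple `(T,)`. A self-contained sketch of that decision follows; the `Trailing` name matches the diff, but the `Ty` type and `finish_paren_ty` helper are illustrative, and the real code also handles trailing `+` bounds.

// Illustrative stand-in for the parser's Trailing flag.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Trailing {
    No,
    Yes,
}

#[derive(Debug, PartialEq)]
enum Ty {
    Named(String),
    Tuple(Vec<Ty>),
    Paren(Box<Ty>),
}

// `(T)` is a parenthesized type, `(T,)` is a one-element tuple: the decision
// hinges on exactly the "one element and no trailing comma" test from the
// hunk above (simplified here).
fn finish_paren_ty(mut elems: Vec<Ty>, trailing: Trailing) -> Ty {
    if elems.len() == 1 && trailing == Trailing::No {
        Ty::Paren(Box::new(elems.pop().unwrap()))
    } else {
        Ty::Tuple(elems)
    }
}

fn main() {
    let t = || Ty::Named("u8".into());
    assert_eq!(finish_paren_ty(vec![t()], Trailing::No), Ty::Paren(Box::new(t())));
    assert_eq!(finish_paren_ty(vec![t()], Trailing::Yes), Ty::Tuple(vec![t()]));
}
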
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index 2fafbd6d97b..f88edf29dce 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -13,7 +13,7 @@ use rustc_session::lint::builtin::ILL_FORMED_ATTRIBUTE_INPUT;
 use rustc_session::parse::ParseSess;
 use rustc_span::{sym, Span, Symbol};
 
-pub fn check_attr(sess: &ParseSess, attr: &Attribute) {
+pub fn check_attr(psess: &ParseSess, attr: &Attribute) {
     if attr.is_doc_comment() {
         return;
     }
@@ -24,11 +24,11 @@ pub fn check_attr(sess: &ParseSess, attr: &Attribute) {
     match attr_info {
         // `rustc_dummy` doesn't have any restrictions specific to built-in attributes.
         Some(BuiltinAttribute { name, template, .. }) if *name != sym::rustc_dummy => {
-            check_builtin_attribute(sess, attr, *name, *template)
+            check_builtin_attribute(psess, attr, *name, *template)
         }
         _ if let AttrArgs::Eq(..) = attr.get_normal_item().args => {
             // All key-value attributes are restricted to meta-item syntax.
-            parse_meta(sess, attr)
+            parse_meta(psess, attr)
                 .map_err(|err| {
                     err.emit();
                 })
@@ -38,7 +38,7 @@ pub fn check_attr(sess: &ParseSess, attr: &Attribute) {
     }
 }
 
-pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, MetaItem> {
+pub fn parse_meta<'a>(psess: &'a ParseSess, attr: &Attribute) -> PResult<'a, MetaItem> {
     let item = attr.get_normal_item();
     Ok(MetaItem {
         span: attr.span,
@@ -46,8 +46,9 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
         kind: match &item.args {
             AttrArgs::Empty => MetaItemKind::Word,
             AttrArgs::Delimited(DelimArgs { dspan, delim, tokens }) => {
-                check_meta_bad_delim(sess, *dspan, *delim);
-                let nmis = parse_in(sess, tokens.clone(), "meta list", |p| p.parse_meta_seq_top())?;
+                check_meta_bad_delim(psess, *dspan, *delim);
+                let nmis =
+                    parse_in(psess, tokens.clone(), "meta list", |p| p.parse_meta_seq_top())?;
                 MetaItemKind::List(nmis)
             }
             AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => {
@@ -56,7 +57,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
                     let res = match res {
                         Ok(lit) => {
                             if token_lit.suffix.is_some() {
-                                let mut err = sess.dcx.struct_span_err(
+                                let mut err = psess.dcx.struct_span_err(
                                     expr.span,
                                     "suffixed literals are not allowed in attributes",
                                 );
@@ -70,11 +71,11 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
                             }
                         }
                         Err(err) => {
-                            report_lit_error(sess, err, token_lit, expr.span);
+                            let guar = report_lit_error(psess, err, token_lit, expr.span);
                             let lit = ast::MetaItemLit {
                                 symbol: token_lit.symbol,
                                 suffix: token_lit.suffix,
-                                kind: ast::LitKind::Err,
+                                kind: ast::LitKind::Err(guar),
                                 span: expr.span,
                             };
                             MetaItemKind::NameValue(lit)
@@ -88,9 +89,9 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
                     //   results in `ast::ExprKind::Err`. In that case we delay
                     //   the error because an earlier error will have already
                     //   been reported.
-                    let msg = format!("attribute value must be a literal");
-                    let mut err = sess.dcx.struct_span_err(expr.span, msg);
-                    if let ast::ExprKind::Err = expr.kind {
+                    let msg = "attribute value must be a literal";
+                    let mut err = psess.dcx.struct_span_err(expr.span, msg);
+                    if let ast::ExprKind::Err(_) = expr.kind {
                         err.downgrade_to_delayed_bug();
                     }
                     return Err(err);
@@ -101,21 +102,21 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
     })
 }
 
-pub fn check_meta_bad_delim(sess: &ParseSess, span: DelimSpan, delim: Delimiter) {
+pub fn check_meta_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) {
     if let Delimiter::Parenthesis = delim {
         return;
     }
-    sess.dcx.emit_err(errors::MetaBadDelim {
+    psess.dcx.emit_err(errors::MetaBadDelim {
         span: span.entire(),
         sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
     });
 }
 
-pub fn check_cfg_attr_bad_delim(sess: &ParseSess, span: DelimSpan, delim: Delimiter) {
+pub fn check_cfg_attr_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) {
     if let Delimiter::Parenthesis = delim {
         return;
     }
-    sess.dcx.emit_err(errors::CfgAttrBadDelim {
+    psess.dcx.emit_err(errors::CfgAttrBadDelim {
         span: span.entire(),
         sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
     });
@@ -132,13 +133,13 @@ fn is_attr_template_compatible(template: &AttributeTemplate, meta: &ast::MetaIte
 }
 
 pub fn check_builtin_attribute(
-    sess: &ParseSess,
+    psess: &ParseSess,
     attr: &Attribute,
     name: Symbol,
     template: AttributeTemplate,
 ) {
-    match parse_meta(sess, attr) {
-        Ok(meta) => check_builtin_meta_item(sess, &meta, attr.style, name, template),
+    match parse_meta(psess, attr) {
+        Ok(meta) => check_builtin_meta_item(psess, &meta, attr.style, name, template),
         Err(err) => {
             err.emit();
         }
@@ -146,7 +147,7 @@ pub fn check_builtin_attribute(
 }
 
 pub fn check_builtin_meta_item(
-    sess: &ParseSess,
+    psess: &ParseSess,
     meta: &MetaItem,
     style: ast::AttrStyle,
     name: Symbol,
@@ -157,12 +158,12 @@ pub fn check_builtin_meta_item(
     let should_skip = |name| name == sym::cfg;
 
     if !should_skip(name) && !is_attr_template_compatible(&template, &meta.kind) {
-        emit_malformed_attribute(sess, style, meta.span, name, template);
+        emit_malformed_attribute(psess, style, meta.span, name, template);
     }
 }
 
 fn emit_malformed_attribute(
-    sess: &ParseSess,
+    psess: &ParseSess,
     style: ast::AttrStyle,
     span: Span,
     name: Symbol,
@@ -204,9 +205,10 @@ fn emit_malformed_attribute(
     }
     suggestions.sort();
     if should_warn(name) {
-        sess.buffer_lint(ILL_FORMED_ATTRIBUTE_INPUT, span, ast::CRATE_NODE_ID, msg);
+        psess.buffer_lint(ILL_FORMED_ATTRIBUTE_INPUT, span, ast::CRATE_NODE_ID, msg);
     } else {
-        sess.dcx
+        psess
+            .dcx
             .struct_span_err(span, error_msg)
             .with_span_suggestions(
                 span,
@@ -223,12 +225,12 @@ fn emit_malformed_attribute(
 }
 
 pub fn emit_fatal_malformed_builtin_attribute(
-    sess: &ParseSess,
+    psess: &ParseSess,
     attr: &Attribute,
     name: Symbol,
 ) -> ! {
     let template = BUILTIN_ATTRIBUTE_MAP.get(&name).expect("builtin attr defined").template;
-    emit_malformed_attribute(sess, attr.style, attr.span, name, template);
+    emit_malformed_attribute(psess, attr.style, attr.span, name, template);
     // This is fatal, otherwise it will likely cause a cascade of other errors
     // (and an error here is expected to be very rare).
     FatalError.raise()
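Note on the recurring `TyKind::Err(guar)` / `LitKind::Err(guar)` / `mk_expr_err(sp, guar)` pattern throughout this diff: the error variants now carry an `ErrorGuaranteed`, a token that can only be obtained by actually emitting (or delaying) a diagnostic, so recovery nodes cannot be built silently. A minimal sketch of why the token is unforgeable, with a module boundary standing in for the crate boundary and a toy `DiagCtxt` in place of rustc_errors.

// Sketch only: the module makes the guarantee's field private, so the sole
// way to construct it is to go through emit_err().
mod diag {
    #[derive(Clone, Copy, Debug)]
    pub struct ErrorGuaranteed(()); // private field: unforgeable outside this module

    pub struct DiagCtxt;

    impl DiagCtxt {
        pub fn emit_err(&self, msg: &str) -> ErrorGuaranteed {
            eprintln!("error: {msg}");
            ErrorGuaranteed(())
        }
    }
}

use diag::{DiagCtxt, ErrorGuaranteed};

#[derive(Debug)]
enum LitKind {
    Int(u128),
    // Building an error literal requires proof that a diagnostic went out.
    Err(ErrorGuaranteed),
}

fn parse_lit(dcx: &DiagCtxt, src: &str) -> LitKind {
    match src.parse() {
        Ok(n) => LitKind::Int(n),
        // Recovery must go through the diagnostic context first.
        Err(_) => LitKind::Err(dcx.emit_err(&format!("invalid literal `{src}`"))),
    }
}

fn main() {
    let dcx = DiagCtxt;
    println!("{:?}", parse_lit(&dcx, "42"));
    println!("{:?}", parse_lit(&dcx, "4_2x"));
}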