Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs                          |  16
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                       |  13
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs                |   4
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs  |   4
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs             |   2
-rw-r--r--  compiler/rustc_parse/src/lib.rs                             |   6
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs             |   8
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs              |  88
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                     | 164
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs                 |   2
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                     |  43
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                      |  55
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                      |  12
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs                     |   3
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                     |  15
-rw-r--r--  compiler/rustc_parse/src/parser/tests.rs                    |   6
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                       |  41
-rw-r--r--  compiler/rustc_parse/src/validate_attr.rs                   |  10
18 files changed, 261 insertions, 231 deletions
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 6c1fcbe06fc..46e15734853 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -3,7 +3,7 @@ use std::borrow::Cow;
 use rustc_ast::token::Token;
 use rustc_ast::{Path, Visibility};
 use rustc_errors::{
-    codes::*, Applicability, Diag, DiagCtxt, Diagnostic, EmissionGuarantee, Level,
+    codes::*, Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level,
     SubdiagMessageOp, Subdiagnostic,
 };
 use rustc_macros::{Diagnostic, Subdiagnostic};
@@ -1052,7 +1052,7 @@ pub(crate) struct ExpectedIdentifier {
 
 impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedIdentifier {
     #[track_caller]
-    fn into_diag(self, dcx: &'a DiagCtxt, level: Level) -> Diag<'a, G> {
+    fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> {
         let token_descr = TokenDescription::from_token(&self.token);
 
         let mut diag = Diag::new(
@@ -1112,7 +1112,7 @@ pub(crate) struct ExpectedSemi {
 
 impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedSemi {
     #[track_caller]
-    fn into_diag(self, dcx: &'a DiagCtxt, level: Level) -> Diag<'a, G> {
+    fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> {
         let token_descr = TokenDescription::from_token(&self.token);
 
         let mut diag = Diag::new(
@@ -2699,12 +2699,13 @@ pub(crate) struct SingleColonImportPath {
 
 #[derive(Diagnostic)]
 #[diag(parse_bad_item_kind)]
-#[help]
 pub(crate) struct BadItemKind {
     #[primary_span]
     pub span: Span,
     pub descr: &'static str,
     pub ctx: &'static str,
+    #[help]
+    pub help: Option<()>,
 }
 
 #[derive(Diagnostic)]
@@ -2989,3 +2990,10 @@ pub(crate) struct ExprRArrowCall {
     #[suggestion(style = "short", applicability = "machine-applicable", code = ".")]
     pub span: Span,
 }
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_range_attribute)]
+pub(crate) struct DotDotRangeAttribute {
+    #[primary_span]
+    pub span: Span,
+}
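
The signature changes above (and the `psess.dcx` -> `psess.dcx()` call sites throughout the rest of this diff) replace the borrowed `&'a DiagCtxt` with a by-value `DiagCtxtHandle<'a>`. As a rough, self-contained analogue of that pattern (hypothetical types, not rustc's real definitions), the handle is a small `Copy` wrapper over a shared reference that methods can take by value:

```rust
use std::cell::Cell;

// Hypothetical stand-ins for DiagCtxt / DiagCtxtHandle; the real rustc types
// carry far more state, but the ownership pattern is the same.
struct DiagCtxt {
    err_count: Cell<usize>,
}

#[derive(Clone, Copy)]
struct DiagCtxtHandle<'a> {
    dcx: &'a DiagCtxt,
}

impl DiagCtxt {
    fn handle(&self) -> DiagCtxtHandle<'_> {
        DiagCtxtHandle { dcx: self }
    }
}

impl<'a> DiagCtxtHandle<'a> {
    // Taking `self` by value is cheap: the handle is just a copied reference.
    fn emit_err(self, msg: &str) {
        self.dcx.err_count.set(self.dcx.err_count.get() + 1);
        eprintln!("error: {msg}");
    }
}

fn main() {
    let dcx = DiagCtxt { err_count: Cell::new(0) };
    let handle = dcx.handle();
    handle.emit_err("demo diagnostic");
    assert_eq!(dcx.err_count.get(), 1);
}
```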
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 43f4963b27a..511805cf8d6 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -7,7 +7,7 @@ use rustc_ast::ast::{self, AttrStyle};
 use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
-use rustc_errors::{codes::*, Applicability, Diag, DiagCtxt, StashKey};
+use rustc_errors::{codes::*, Applicability, Diag, DiagCtxtHandle, StashKey};
 use rustc_lexer::unescape::{self, EscapeError, Mode};
 use rustc_lexer::{Base, DocStyle, RawStrError};
 use rustc_lexer::{Cursor, LiteralKind};
@@ -113,8 +113,8 @@ struct StringReader<'psess, 'src> {
 }
 
 impl<'psess, 'src> StringReader<'psess, 'src> {
-    fn dcx(&self) -> &'psess DiagCtxt {
-        &self.psess.dcx
+    fn dcx(&self) -> DiagCtxtHandle<'psess> {
+        self.psess.dcx()
     }
 
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
@@ -248,8 +248,8 @@ impl<'psess, 'src> StringReader<'psess, 'src> {
                     let suffix = if suffix_start < self.pos {
                         let string = self.str_from(suffix_start);
                         if string == "_" {
-                            self.psess
-                                .dcx
+                            self
+                                .dcx()
                                 .emit_err(errors::UnderscoreLiteralSuffix { span: self.mk_sp(suffix_start, self.pos) });
                             None
                         } else {
@@ -597,8 +597,7 @@ impl<'psess, 'src> StringReader<'psess, 'src> {
     }
 
     fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! {
-        self.psess
-            .dcx
+        self.dcx()
             .struct_span_fatal(
                 self.mk_sp(start, self.pos),
                 format!(
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index f7645446081..8e543454691 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -71,7 +71,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
 
     fn eof_err(&mut self) -> PErr<'psess> {
         let msg = "this file contains an unclosed delimiter";
-        let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg);
+        let mut err = self.string_reader.dcx().struct_span_err(self.token.span, msg);
         for &(_, sp) in &self.diag_info.open_braces {
             err.span_label(sp, "unclosed delimiter");
             self.diag_info.unmatched_delims.push(UnmatchedDelim {
@@ -290,7 +290,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
         // An unexpected closing delimiter (i.e., there is no matching opening delimiter).
         let token_str = token_to_string(&self.token);
         let msg = format!("unexpected closing delimiter: `{token_str}`");
-        let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg);
+        let mut err = self.string_reader.dcx().struct_span_err(self.token.span, msg);
 
         report_suspicious_mismatch_block(
             &mut err,
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index cad25c66827..b7a790fcf83 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -3,7 +3,7 @@
 use std::iter::once;
 use std::ops::Range;
 
-use rustc_errors::{Applicability, DiagCtxt, ErrorGuaranteed};
+use rustc_errors::{Applicability, DiagCtxtHandle, ErrorGuaranteed};
 use rustc_lexer::unescape::{EscapeError, Mode};
 use rustc_span::{BytePos, Span};
 use tracing::debug;
@@ -11,7 +11,7 @@ use tracing::debug;
 use crate::errors::{MoreThanOneCharNote, MoreThanOneCharSugg, NoBraceUnicodeSub, UnescapeError};
 
 pub(crate) fn emit_unescape_error(
-    dcx: &DiagCtxt,
+    dcx: DiagCtxtHandle<'_>,
     // interior part of the literal, between quotes
     lit: &str,
     // full span of the literal, including quotes and any prefix
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 8eb299108d1..0a82ede3b75 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -351,7 +351,7 @@ pub(super) fn check_for_substitution(
 
     let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
         let msg = format!("substitution character not found for '{ch}'");
-        reader.psess.dcx.span_bug(span, msg);
+        reader.dcx().span_bug(span, msg);
     };
 
     // special help suggestion for "directed" double quotes
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 25cab7252a3..5522127be83 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -73,7 +73,7 @@ pub fn new_parser_from_file<'a>(
 ) -> Result<Parser<'a>, Vec<Diag<'a>>> {
     let source_file = psess.source_map().load_file(path).unwrap_or_else(|e| {
         let msg = format!("couldn't read {}: {}", path.display(), e);
-        let mut err = psess.dcx.struct_fatal(msg);
+        let mut err = psess.dcx().struct_fatal(msg);
         if let Some(sp) = sp {
             err.span(sp);
         }
@@ -115,7 +115,7 @@ fn source_file_to_stream<'psess>(
     override_span: Option<Span>,
 ) -> Result<TokenStream, Vec<Diag<'psess>>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
-        psess.dcx.bug(format!(
+        psess.dcx().bug(format!(
             "cannot lex `source_file` without source: {}",
             psess.source_map().filename_for_diagnostics(&source_file.name)
         ));
@@ -179,7 +179,7 @@ pub fn parse_cfg_attr(
             }
         }
         _ => {
-            psess.dcx.emit_err(errors::MalformedCfgAttr {
+            psess.dcx().emit_err(errors::MalformedCfgAttr {
                 span: attr.span,
                 sugg: CFG_ATTR_GRAMMAR_HELP,
             });
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 62c8f9f5dac..b5480b6b7d2 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -15,7 +15,7 @@ use std::ops::Range;
 /// for the attribute target. This allows us to perform cfg-expansion on
 /// a token stream before we invoke a derive proc-macro.
 ///
-/// This wrapper prevents direct access to the underlying `ast::AttrVec>`.
+/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
 /// Parsing code can only get access to the underlying attributes
 /// by passing an `AttrWrapper` to `collect_tokens_trailing_tokens`.
 /// This makes it difficult to accidentally construct an AST node
@@ -41,7 +41,7 @@ impl AttrWrapper {
     }
 
     pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
-        psess.dcx.span_delayed_bug(
+        psess.dcx().span_delayed_bug(
             self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
             "AttrVec is taken for recovery but no error is produced",
         );
@@ -177,6 +177,10 @@ impl<'a> Parser<'a> {
     /// into a `LazyAttrTokenStream`, and returned along with the result
     /// of the callback.
     ///
+    /// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
+    /// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
+    /// details.
+    ///
     /// Note: If your callback consumes an opening delimiter
     /// (including the case where you call `collect_tokens`
     /// when the current token is an opening delimiter),
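
The doc comment above describes the token-collection machinery only in prose. A much-simplified standalone sketch of the idea follows (hypothetical `Parser` type; rustc records the tokens lazily through `LazyAttrTokenStream` rather than eagerly as here): run a parsing callback while remembering which tokens it consumed, so they can be replayed later, e.g. for a derive proc-macro.

```rust
struct Parser {
    tokens: Vec<String>,
    pos: usize,
}

impl Parser {
    fn bump(&mut self) -> Option<String> {
        let tok = self.tokens.get(self.pos).cloned();
        if tok.is_some() {
            self.pos += 1;
        }
        tok
    }

    // Run `f`, then hand back both its result and the tokens it consumed.
    fn collect_tokens<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> (T, Vec<String>) {
        let start = self.pos;
        let result = f(self);
        (result, self.tokens[start..self.pos].to_vec())
    }
}

fn main() {
    let mut p = Parser {
        tokens: vec!["#[attr]".into(), "fn".into(), "foo".into()],
        pos: 0,
    };
    let (ident, collected) = p.collect_tokens(|p| {
        p.bump(); // attribute
        p.bump(); // `fn`
        p.bump()  // item name
    });
    assert_eq!(ident.as_deref(), Some("foo"));
    assert_eq!(collected.len(), 3);
}
```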
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 2bb6fb53869..81d5f0fca0e 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -1,7 +1,6 @@
 use super::pat::Expected;
 use super::{
-    BlockMode, CommaRecoveryMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep,
-    TokenExpectType, TokenType,
+    BlockMode, CommaRecoveryMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep, TokenType,
 };
 use crate::errors::{
     AmbiguousPlus, AsyncMoveBlockIn2015, AttributeOnParamType, BadQPathStage2, BadTypePlus,
@@ -35,7 +34,7 @@ use rustc_ast::{
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::{
-    pluralize, Applicability, Diag, DiagCtxt, ErrorGuaranteed, FatalError, PErr, PResult,
+    pluralize, Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, FatalError, PErr, PResult,
     Subdiagnostic,
 };
 use rustc_session::errors::ExprParenthesesNeeded;
@@ -241,8 +240,8 @@ impl<'a> DerefMut for SnapshotParser<'a> {
 }
 
 impl<'a> Parser<'a> {
-    pub fn dcx(&self) -> &'a DiagCtxt {
-        &self.psess.dcx
+    pub fn dcx(&self) -> DiagCtxtHandle<'a> {
+        self.psess.dcx()
     }
 
     /// Replace `self` with `snapshot.parser`.
@@ -667,7 +666,7 @@ impl<'a> Parser<'a> {
         {
             err.note("you may be trying to write a c-string literal");
             err.note("c-string literals require Rust 2021 or later");
-            err.subdiagnostic(self.dcx(), HelpUseLatestEdition::new());
+            err.subdiagnostic(HelpUseLatestEdition::new());
         }
 
         // `pub` may be used for an item or `pub(crate)`
@@ -1045,9 +1044,7 @@ impl<'a> Parser<'a> {
     /// passes through any errors encountered. Used for error recovery.
     pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
         if let Err(err) =
-            self.parse_seq_to_before_tokens(kets, SeqSep::none(), TokenExpectType::Expect, |p| {
-                Ok(p.parse_token_tree())
-            })
+            self.parse_seq_to_before_tokens(kets, &[], SeqSep::none(), |p| Ok(p.parse_token_tree()))
         {
             err.cancel();
         }
@@ -2360,7 +2357,7 @@ impl<'a> Parser<'a> {
         let mut err = self.dcx().struct_span_err(span, msg);
         let sp = self.psess.source_map().start_point(self.token.span);
         if let Some(sp) = self.psess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            err.subdiagnostic(self.dcx(), ExprParenthesesNeeded::surrounding(*sp));
+            err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
         }
         err.span_label(span, "expected expression");
 
@@ -2502,7 +2499,8 @@ impl<'a> Parser<'a> {
     /// wrapped in braces.
     pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
         let start = self.token.span;
-        let expr = self.parse_expr_res(Restrictions::CONST_EXPR, None).map_err(|mut err| {
+        let attrs = self.parse_outer_attributes()?;
+        let expr = self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
             err.span_label(
                 start.shrink_to_lo(),
                 "while parsing a const generic argument starting here",
@@ -2624,7 +2622,10 @@ impl<'a> Parser<'a> {
         if is_op_or_dot {
             self.bump();
         }
-        match self.parse_expr_res(Restrictions::CONST_EXPR, None) {
+        match (|| {
+            let attrs = self.parse_outer_attributes()?;
+            self.parse_expr_res(Restrictions::CONST_EXPR, attrs)
+        })() {
             Ok(expr) => {
                 // Find a mistake like `MyTrait<Assoc == S::Assoc>`.
                 if token::EqEq == snapshot.token.kind {
@@ -2678,7 +2679,10 @@ impl<'a> Parser<'a> {
         &mut self,
         mut snapshot: SnapshotParser<'a>,
     ) -> Option<P<ast::Expr>> {
-        match snapshot.parse_expr_res(Restrictions::CONST_EXPR, None) {
+        match (|| {
+            let attrs = self.parse_outer_attributes()?;
+            snapshot.parse_expr_res(Restrictions::CONST_EXPR, attrs)
+        })() {
             // Since we don't know the exact reason why we failed to parse the type or the
             // expression, employ a simple heuristic to weed out some pathological cases.
             Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => {
@@ -2961,9 +2965,10 @@ impl<'a> Parser<'a> {
 
     /// This checks if this is a conflict marker, depending of the parameter passed.
     ///
-    /// * `>>>>>`
-    /// * `=====`
-    /// * `<<<<<`
+    /// * `<<<<<<<`
+    /// * `|||||||`
+    /// * `=======`
+    /// * `>>>>>>>`
     ///
     pub(super) fn is_vcs_conflict_marker(
         &mut self,
@@ -2993,14 +2998,18 @@ impl<'a> Parser<'a> {
     }
 
     pub(crate) fn err_vcs_conflict_marker(&mut self) -> PResult<'a, ()> {
+        // <<<<<<<
         let Some(start) = self.conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt)
         else {
             return Ok(());
         };
         let mut spans = Vec::with_capacity(3);
         spans.push(start);
+        // |||||||
         let mut middlediff3 = None;
+        // =======
         let mut middle = None;
+        // >>>>>>>
         let mut end = None;
         loop {
             if self.token.kind == TokenKind::Eof {
@@ -3021,29 +3030,50 @@ impl<'a> Parser<'a> {
             }
             self.bump();
         }
+
         let mut err = self.dcx().struct_span_err(spans, "encountered diff marker");
-        err.span_label(start, "after this is the code before the merge");
-        if let Some(middle) = middlediff3 {
-            err.span_label(middle, "");
-        }
+        match middlediff3 {
+            // We're using diff3
+            Some(middlediff3) => {
+                err.span_label(
+                    start,
+                    "between this marker and `|||||||` is the code that we're merging into",
+                );
+                err.span_label(middlediff3, "between this marker and `=======` is the base code (what the two refs diverged from)");
+            }
+            None => {
+                err.span_label(
+                    start,
+                    "between this marker and `=======` is the code that we're merging into",
+                );
+            }
+        };
+
         if let Some(middle) = middle {
-            err.span_label(middle, "");
+            err.span_label(middle, "between this marker and `>>>>>>>` is the incoming code");
         }
         if let Some(end) = end {
-            err.span_label(end, "above this are the incoming code changes");
+            err.span_label(end, "this marker concludes the conflict region");
         }
-        err.help(
-            "if you're having merge conflicts after pulling new code, the top section is the code \
-             you already had and the bottom section is the remote code",
+        err.note(
+            "conflict markers indicate that a merge was started but could not be completed due \
+             to merge conflicts\n\
+             to resolve a conflict, keep only the code you want and then delete the lines \
+             containing conflict markers",
         );
         err.help(
-            "if you're in the middle of a rebase, the top section is the code being rebased onto \
-             and the bottom section is the code coming from the current commit being rebased",
+            "if you're having merge conflicts after pulling new code:\n\
+             the top section is the code you already had and the bottom section is the remote code\n\
+             if you're in the middle of a rebase:\n\
+             the top section is the code being rebased onto and the bottom section is the code \
+             coming from the current commit being rebased",
         );
+
         err.note(
-            "for an explanation on these markers from the `git` documentation, visit \
-             <https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>",
+            "for an explanation on these markers from the `git` documentation:\n\
+             visit <https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>",
         );
+
         Err(err)
     }
 
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index e15d6ab2123..9fad954adda 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1,10 +1,11 @@
 // ignore-tidy-filelength
+
 use super::diagnostics::SnapshotParser;
 use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
     AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
-    SemiColonMode, SeqSep, TokenExpectType, TokenType, Trailing, TrailingToken,
+    SemiColonMode, SeqSep, TokenType, Trailing, TrailingToken,
 };
 
 use crate::errors;
@@ -70,28 +71,10 @@ macro_rules! maybe_whole_expr {
 
 #[derive(Debug)]
 pub(super) enum LhsExpr {
-    NotYetParsed,
-    AttributesParsed(AttrWrapper),
-    AlreadyParsed { expr: P<Expr>, starts_statement: bool },
-}
-
-impl From<Option<AttrWrapper>> for LhsExpr {
-    /// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)`
-    /// and `None` into `LhsExpr::NotYetParsed`.
-    ///
-    /// This conversion does not allocate.
-    fn from(o: Option<AttrWrapper>) -> Self {
-        if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { LhsExpr::NotYetParsed }
-    }
-}
-
-impl From<P<Expr>> for LhsExpr {
-    /// Converts the `expr: P<Expr>` into `LhsExpr::AlreadyParsed { expr, starts_statement: false }`.
-    ///
-    /// This conversion does not allocate.
-    fn from(expr: P<Expr>) -> Self {
-        LhsExpr::AlreadyParsed { expr, starts_statement: false }
-    }
+    // Already parsed just the outer attributes.
+    Unparsed { attrs: AttrWrapper },
+    // Already parsed the expression.
+    Parsed { expr: P<Expr>, starts_statement: bool },
 }
 
 #[derive(Debug)]
@@ -112,12 +95,16 @@ impl<'a> Parser<'a> {
     pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
         self.current_closure.take();
 
-        self.parse_expr_res(Restrictions::empty(), None)
+        let attrs = self.parse_outer_attributes()?;
+        self.parse_expr_res(Restrictions::empty(), attrs)
     }
 
-    /// Parses an expression, forcing tokens to be collected
+    /// Parses an expression, forcing tokens to be collected.
     pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
-        self.collect_tokens_no_attrs(|this| this.parse_expr())
+        self.current_closure.take();
+
+        let attrs = self.parse_outer_attributes()?;
+        self.collect_tokens_no_attrs(|this| this.parse_expr_res(Restrictions::empty(), attrs))
     }
 
     pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
@@ -125,7 +112,8 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
-        match self.parse_expr_res(restrictions, None) {
+        let attrs = self.parse_outer_attributes()?;
+        match self.parse_expr_res(restrictions, attrs) {
             Ok(expr) => Ok(expr),
             Err(err) => match self.token.ident() {
                 Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
@@ -152,21 +140,9 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_expr_res(
         &mut self,
         r: Restrictions,
-        already_parsed_attrs: Option<AttrWrapper>,
-    ) -> PResult<'a, P<Expr>> {
-        self.with_res(r, |this| this.parse_expr_assoc(already_parsed_attrs))
-    }
-
-    /// Parses an associative expression.
-    ///
-    /// This parses an expression accounting for associativity and precedence of the operators in
-    /// the expression.
-    #[inline]
-    fn parse_expr_assoc(
-        &mut self,
-        already_parsed_attrs: Option<AttrWrapper>,
+        attrs: AttrWrapper,
     ) -> PResult<'a, P<Expr>> {
-        self.parse_expr_assoc_with(0, already_parsed_attrs.into())
+        self.with_res(r, |this| this.parse_expr_assoc_with(0, LhsExpr::Unparsed { attrs }))
     }
 
     /// Parses an associative expression with operators of at least `min_prec` precedence.
@@ -176,18 +152,17 @@ impl<'a> Parser<'a> {
         lhs: LhsExpr,
     ) -> PResult<'a, P<Expr>> {
         let mut starts_stmt = false;
-        let mut lhs = if let LhsExpr::AlreadyParsed { expr, starts_statement } = lhs {
-            starts_stmt = starts_statement;
-            expr
-        } else {
-            let attrs = match lhs {
-                LhsExpr::AttributesParsed(attrs) => Some(attrs),
-                _ => None,
-            };
-            if self.token.is_range_separator() {
-                return self.parse_expr_prefix_range(attrs);
-            } else {
-                self.parse_expr_prefix(attrs)?
+        let mut lhs = match lhs {
+            LhsExpr::Parsed { expr, starts_statement } => {
+                starts_stmt = starts_statement;
+                expr
+            }
+            LhsExpr::Unparsed { attrs } => {
+                if self.token.is_range_separator() {
+                    return self.parse_expr_prefix_range(attrs);
+                } else {
+                    self.parse_expr_prefix(attrs)?
+                }
             }
         };
 
@@ -325,7 +300,8 @@ impl<'a> Parser<'a> {
                 Fixity::None => 1,
             };
             let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
-                this.parse_expr_assoc_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
+                let attrs = this.parse_outer_attributes()?;
+                this.parse_expr_assoc_with(prec + prec_adjustment, LhsExpr::Unparsed { attrs })
             })?;
 
             let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span);
@@ -498,8 +474,9 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Expr>> {
         let rhs = if self.is_at_start_of_range_notation_rhs() {
             let maybe_lt = self.token.clone();
+            let attrs = self.parse_outer_attributes()?;
             Some(
-                self.parse_expr_assoc_with(prec + 1, LhsExpr::NotYetParsed)
+                self.parse_expr_assoc_with(prec + 1, LhsExpr::Unparsed { attrs })
                     .map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?,
             )
         } else {
@@ -526,7 +503,12 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
-    fn parse_expr_prefix_range(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_prefix_range(&mut self, attrs: AttrWrapper) -> PResult<'a, P<Expr>> {
+        if !attrs.is_empty() {
+            let err = errors::DotDotRangeAttribute { span: self.token.span };
+            self.dcx().emit_err(err);
+        }
+
         // Check for deprecated `...` syntax.
         if self.token == token::DotDotDot {
             self.err_dotdotdot_syntax(self.token.span);
@@ -543,20 +525,20 @@ impl<'a> Parser<'a> {
             _ => RangeLimits::Closed,
         };
         let op = AssocOp::from_token(&self.token);
-        // FIXME: `parse_prefix_range_expr` is called when the current
-        // token is `DotDot`, `DotDotDot`, or `DotDotEq`. If we haven't already
-        // parsed attributes, then trying to parse them here will always fail.
-        // We should figure out how we want attributes on range expressions to work.
-        let attrs = self.parse_or_use_outer_attributes(attrs)?;
+        let attrs = self.parse_outer_attributes()?;
         self.collect_tokens_for_expr(attrs, |this, attrs| {
             let lo = this.token.span;
             let maybe_lt = this.look_ahead(1, |t| t.clone());
             this.bump();
             let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
                 // RHS must be parsed with more associativity than the dots.
-                this.parse_expr_assoc_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
-                    .map(|x| (lo.to(x.span), Some(x)))
-                    .map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
+                let attrs = this.parse_outer_attributes()?;
+                this.parse_expr_assoc_with(
+                    op.unwrap().precedence() + 1,
+                    LhsExpr::Unparsed { attrs },
+                )
+                .map(|x| (lo.to(x.span), Some(x)))
+                .map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
             } else {
                 (lo, None)
             };
@@ -566,8 +548,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a prefix-unary-operator expr.
-    fn parse_expr_prefix(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
-        let attrs = self.parse_or_use_outer_attributes(attrs)?;
+    fn parse_expr_prefix(&mut self, attrs: AttrWrapper) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
         macro_rules! make_it {
@@ -616,7 +597,8 @@ impl<'a> Parser<'a> {
                 this.dcx().emit_err(err);
 
                 this.bump();
-                this.parse_expr_prefix(None)
+                let attrs = this.parse_outer_attributes()?;
+                this.parse_expr_prefix(attrs)
             }
             // Recover from `++x`:
             token::BinOp(token::Plus)
@@ -629,7 +611,7 @@ impl<'a> Parser<'a> {
                 this.bump();
                 this.bump();
 
-                let operand_expr = this.parse_expr_dot_or_call(Default::default())?;
+                let operand_expr = this.parse_expr_dot_or_call(attrs)?;
                 this.recover_from_prefix_increment(operand_expr, pre_span, starts_stmt)
             }
             token::Ident(..) if this.token.is_keyword(kw::Box) => {
@@ -638,13 +620,14 @@ impl<'a> Parser<'a> {
             token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => {
                 make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
             }
-            _ => return this.parse_expr_dot_or_call(Some(attrs)),
+            _ => return this.parse_expr_dot_or_call(attrs),
         }
     }
 
     fn parse_expr_prefix_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
         self.bump();
-        let expr = self.parse_expr_prefix(None)?;
+        let attrs = self.parse_outer_attributes()?;
+        let expr = self.parse_expr_prefix(attrs)?;
         let span = self.interpolated_or_expr_span(&expr);
         Ok((lo.to(span), expr))
     }
@@ -894,10 +877,11 @@ impl<'a> Parser<'a> {
         let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon);
         let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below.
         let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo);
+        let attrs = self.parse_outer_attributes()?;
         let expr = if self.token.is_range_separator() {
-            self.parse_expr_prefix_range(None)
+            self.parse_expr_prefix_range(attrs)
         } else {
-            self.parse_expr_prefix(None)
+            self.parse_expr_prefix(attrs)
         }?;
         let hi = self.interpolated_or_expr_span(&expr);
         let span = lo.to(hi);
@@ -927,8 +911,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
-    fn parse_expr_dot_or_call(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
-        let attrs = self.parse_or_use_outer_attributes(attrs)?;
+    fn parse_expr_dot_or_call(&mut self, attrs: AttrWrapper) -> PResult<'a, P<Expr>> {
         self.collect_tokens_for_expr(attrs, |this, attrs| {
             let base = this.parse_expr_bottom()?;
             let span = this.interpolated_or_expr_span(&base);
@@ -1461,7 +1444,7 @@ impl<'a> Parser<'a> {
                     // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
                     // then suggest parens around the lhs.
                     if let Some(sp) = this.psess.ambiguous_block_expr_parse.borrow().get(&lo) {
-                        err.subdiagnostic(this.dcx(), ExprParenthesesNeeded::surrounding(*sp));
+                        err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
                     }
                     err
                 })
@@ -2365,7 +2348,8 @@ impl<'a> Parser<'a> {
                     self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
                 let prev = self.prev_token.clone();
                 let token = self.token.clone();
-                match self.parse_expr_res(restrictions, None) {
+                let attrs = self.parse_outer_attributes()?;
+                match self.parse_expr_res(restrictions, attrs) {
                     Ok(expr) => expr,
                     Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
                 }
@@ -2456,9 +2440,9 @@ impl<'a> Parser<'a> {
             self.expect(&token::BinOp(token::Or))?;
             let args = self
                 .parse_seq_to_before_tokens(
-                    &[&token::BinOp(token::Or), &token::OrOr],
+                    &[&token::BinOp(token::Or)],
+                    &[&token::OrOr],
                     SeqSep::trailing_allowed(token::Comma),
-                    TokenExpectType::NoExpect,
                     |p| p.parse_fn_block_param(),
                 )?
                 .0;
@@ -2613,8 +2597,9 @@ impl<'a> Parser<'a> {
 
     /// Parses the condition of a `if` or `while` expression.
     fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
+        let attrs = self.parse_outer_attributes()?;
         let mut cond =
-            self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, None)?;
+            self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
 
         CondChecker::new(self).visit_expr(&mut cond);
 
@@ -2661,7 +2646,11 @@ impl<'a> Parser<'a> {
         } else {
             self.expect(&token::Eq)?;
         }
-        let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), None.into())?;
+        let attrs = self.parse_outer_attributes()?;
+        let expr = self.parse_expr_assoc_with(
+            1 + prec_let_scrutinee_needs_par(),
+            LhsExpr::Unparsed { attrs },
+        )?;
         let span = lo.to(expr.span);
         Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
     }
@@ -2794,7 +2783,8 @@ impl<'a> Parser<'a> {
             (Err(err), Some((start_span, left))) if self.eat_keyword(kw::In) => {
                 // We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
                 // happen right before the return of this method.
-                let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None) {
+                let attrs = self.parse_outer_attributes()?;
+                let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
                     Ok(expr) => expr,
                     Err(expr_err) => {
                         // We don't know what followed the `in`, so cancel and bubble up the
@@ -2828,7 +2818,8 @@ impl<'a> Parser<'a> {
             self.error_missing_in_for_loop();
         }
         self.check_for_for_in_in_typo(self.prev_token.span);
-        let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+        let attrs = self.parse_outer_attributes()?;
+        let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
         Ok((pat, expr))
     }
 
@@ -2940,7 +2931,8 @@ impl<'a> Parser<'a> {
     /// Parses a `match ... { ... }` expression (`match` token already eaten).
     fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
         let match_span = self.prev_token.span;
-        let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+        let attrs = self.parse_outer_attributes()?;
+        let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
 
         self.parse_match_block(match_span, match_span, scrutinee, MatchKind::Prefix)
     }
@@ -3144,8 +3136,9 @@ impl<'a> Parser<'a> {
                 let arrow_span = this.prev_token.span;
                 let arm_start_span = this.token.span;
 
+                let attrs = this.parse_outer_attributes()?;
                 let expr =
-                    this.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
+                    this.parse_expr_res(Restrictions::STMT_EXPR, attrs).map_err(|mut err| {
                         err.span_label(arrow_span, "while parsing the `match` arm starting here");
                         err
                     })?;
@@ -3350,7 +3343,8 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
-        self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, None).map_err(
+        let attrs = self.parse_outer_attributes()?;
+        self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs).map_err(
             |mut err| {
                 if self.prev_token == token::OpenDelim(Delimiter::Brace) {
                     let sugg_sp = self.prev_token.span.shrink_to_lo();
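
A compressed standalone sketch of the reshaped `LhsExpr` protocol above (hypothetical types, with strings standing in for real AST nodes): with the `NotYetParsed`/`Option<AttrWrapper>` states gone, every caller parses the outer attributes first and passes either those attributes or an already-parsed left-hand side.

```rust
// Hypothetical analogue of the two-state LhsExpr; not rustc's real types.
#[derive(Debug)]
enum LhsExpr {
    Unparsed { attrs: Vec<String> },
    Parsed { expr: String, starts_statement: bool },
}

fn parse_expr_assoc_with(min_prec: u8, lhs: LhsExpr) -> String {
    let lhs = match lhs {
        LhsExpr::Parsed { expr, .. } => expr,
        // This arm always receives attributes the caller gathered up front.
        LhsExpr::Unparsed { attrs } => format!("{}primary", attrs.concat()),
    };
    format!("assoc(min_prec={min_prec}, lhs={lhs})")
}

fn main() {
    // Caller side: attributes are parsed first, then handed down.
    let attrs = vec!["#[cfg(test)] ".to_string()];
    println!("{}", parse_expr_assoc_with(0, LhsExpr::Unparsed { attrs }));
    println!(
        "{}",
        parse_expr_assoc_with(0, LhsExpr::Parsed { expr: "lhs".into(), starts_statement: true })
    );
}
```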
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 93a15c938ec..fde16ac957d 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -62,7 +62,7 @@ impl<'a> Parser<'a> {
                 let snapshot = self.create_snapshot_for_diagnostic();
                 match self.parse_ty() {
                     Ok(p) => {
-                        if let TyKind::ImplTrait(_, bounds, None) = &p.kind {
+                        if let TyKind::ImplTrait(_, bounds) = &p.kind {
                             let span = impl_span.to(self.token.span.shrink_to_lo());
                             let mut err = self.dcx().struct_span_err(
                                 span,
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 3f5a4afdad8..3e1ea7b129d 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -633,7 +633,7 @@ impl<'a> Parser<'a> {
                     // This notably includes paths passed through `ty` macro fragments (#46438).
                     TyKind::Path(None, path) => path,
                     other => {
-                        if let TyKind::ImplTrait(_, bounds, None) = other
+                        if let TyKind::ImplTrait(_, bounds) = other
                             && let [bound] = bounds.as_slice()
                         {
                             // Suggest removing extra `impl` keyword:
@@ -707,15 +707,25 @@ impl<'a> Parser<'a> {
         };
 
         let (ident, item_kind) = if self.eat(&token::PathSep) {
-            let (suffixes, _) = self.parse_delim_comma_seq(Delimiter::Brace, |p| {
-                Ok((p.parse_path_segment_ident()?, rename(p)?))
-            })?;
+            let suffixes = if self.eat(&token::BinOp(token::Star)) {
+                None
+            } else {
+                let parse_suffix = |p: &mut Self| Ok((p.parse_path_segment_ident()?, rename(p)?));
+                Some(self.parse_delim_comma_seq(Delimiter::Brace, parse_suffix)?.0)
+            };
             let deleg = DelegationMac { qself, prefix: path, suffixes, body: body(self)? };
             (Ident::empty(), ItemKind::DelegationMac(Box::new(deleg)))
         } else {
             let rename = rename(self)?;
             let ident = rename.unwrap_or_else(|| path.segments.last().unwrap().ident);
-            let deleg = Delegation { id: DUMMY_NODE_ID, qself, path, rename, body: body(self)? };
+            let deleg = Delegation {
+                id: DUMMY_NODE_ID,
+                qself,
+                path,
+                rename,
+                body: body(self)?,
+                from_glob: false,
+            };
             (ident, ItemKind::Delegation(Box::new(deleg)))
         };
 
@@ -1237,7 +1247,11 @@ impl<'a> Parser<'a> {
         // FIXME(#100717): needs variant for each `ItemKind` (instead of using `ItemKind::descr()`)
         let span = self.psess.source_map().guess_head_span(span);
         let descr = kind.descr();
-        self.dcx().emit_err(errors::BadItemKind { span, descr, ctx });
+        let help = match kind {
+            ItemKind::DelegationMac(deleg) if deleg.suffixes.is_none() => None,
+            _ => Some(()),
+        };
+        self.dcx().emit_err(errors::BadItemKind { span, descr, ctx, help });
         None
     }
 
@@ -1245,7 +1259,7 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw::Unsafe)
             && self.is_keyword_ahead(1, &[kw::Extern])
             && self.look_ahead(
-                2 + self.look_ahead(2, |t| t.can_begin_literal_maybe_minus() as usize),
+                2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize),
                 |t| t.kind == token::OpenDelim(Delimiter::Brace),
             )
     }
@@ -1966,7 +1980,7 @@ impl<'a> Parser<'a> {
         if self.token.kind == token::Not {
             if let Err(mut err) = self.unexpected() {
                 // Encounter the macro invocation
-                err.subdiagnostic(self.dcx(), MacroExpandsToAdtField { adt_ty });
+                err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
                 return Err(err);
             }
         }
@@ -2382,13 +2396,10 @@ impl<'a> Parser<'a> {
                             .into_iter()
                             .any(|s| self.prev_token.is_ident_named(s));
 
-                        err.subdiagnostic(
-                            self.dcx(),
-                            errors::FnTraitMissingParen {
-                                span: self.prev_token.span,
-                                machine_applicable,
-                            },
-                        );
+                        err.subdiagnostic(errors::FnTraitMissingParen {
+                            span: self.prev_token.span,
+                            machine_applicable,
+                        });
                     }
                     return Err(err);
                 }
@@ -2437,7 +2448,7 @@ impl<'a> Parser<'a> {
                 })
             // `extern ABI fn`
             || self.check_keyword_case(kw::Extern, case)
-                && self.look_ahead(1, |t| t.can_begin_literal_maybe_minus())
+                && self.look_ahead(1, |t| t.can_begin_string_literal())
                 && (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
                     // this branch is only for better diagnostic in later, `pub` is not allowed here
                     (self.may_recover()
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index adf04fcf224..cfd0a72c056 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -335,18 +335,6 @@ impl TokenType {
     }
 }
 
-/// Used by [`Parser::expect_any_with_type`].
-#[derive(Copy, Clone, Debug)]
-enum TokenExpectType {
-    /// Unencountered tokens are inserted into [`Parser::expected_tokens`].
-    /// See [`Parser::check`].
-    Expect,
-
-    /// Unencountered tokens are not inserted into [`Parser::expected_tokens`].
-    /// See [`Parser::check_noexpect`].
-    NoExpect,
-}
-
 /// A sequence separator.
 #[derive(Debug)]
 struct SeqSep {
@@ -807,11 +795,13 @@ impl<'a> Parser<'a> {
     }
 
     /// Checks if the next token is contained within `kets`, and returns `true` if so.
-    fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
-        kets.iter().any(|k| match expect {
-            TokenExpectType::Expect => self.check(k),
-            TokenExpectType::NoExpect => self.check_noexpect(k),
-        })
+    fn expect_any_with_type(
+        &mut self,
+        kets_expected: &[&TokenKind],
+        kets_not_expected: &[&TokenKind],
+    ) -> bool {
+        kets_expected.iter().any(|k| self.check(k))
+            || kets_not_expected.iter().any(|k| self.check_noexpect(k))
     }
 
     /// Parses a sequence until the specified delimiters. The function
@@ -819,9 +809,9 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_before_tokens<T>(
         &mut self,
-        kets: &[&TokenKind],
+        kets_expected: &[&TokenKind],
+        kets_not_expected: &[&TokenKind],
         sep: SeqSep,
-        expect: TokenExpectType,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         let mut first = true;
@@ -829,7 +819,7 @@ impl<'a> Parser<'a> {
         let mut trailing = Trailing::No;
         let mut v = ThinVec::new();
 
-        while !self.expect_any_with_type(kets, expect) {
+        while !self.expect_any_with_type(kets_expected, kets_not_expected) {
             if let token::CloseDelim(..) | token::Eof = self.token.kind {
                 break;
             }
@@ -927,7 +917,8 @@ impl<'a> Parser<'a> {
                                     if self.token == token::Colon {
                                         // we will try to recover in `maybe_recover_struct_lit_bad_delims`
                                         return Err(expect_err);
-                                    } else if let [token::CloseDelim(Delimiter::Parenthesis)] = kets
+                                    } else if let [token::CloseDelim(Delimiter::Parenthesis)] =
+                                        kets_expected
                                     {
                                         return Err(expect_err);
                                     } else {
@@ -940,7 +931,9 @@ impl<'a> Parser<'a> {
                     }
                 }
             }
-            if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
+            if sep.trailing_sep_allowed
+                && self.expect_any_with_type(kets_expected, kets_not_expected)
+            {
                 trailing = Trailing::Yes;
                 break;
             }
@@ -1020,7 +1013,7 @@ impl<'a> Parser<'a> {
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
-        self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
+        self.parse_seq_to_before_tokens(&[ket], &[], sep, f)
     }
 
     /// Parses a sequence, including only the closing delimiter. The function
@@ -1221,6 +1214,9 @@ impl<'a> Parser<'a> {
         if self.eat_keyword_case(kw::Unsafe, case) {
             Safety::Unsafe(self.prev_token.uninterpolated_span())
         } else if self.eat_keyword_case(kw::Safe, case) {
+            self.psess
+                .gated_spans
+                .gate(sym::unsafe_extern_blocks, self.prev_token.uninterpolated_span());
             Safety::Safe(self.prev_token.uninterpolated_span())
         } else {
             Safety::Default
@@ -1337,17 +1333,6 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_or_use_outer_attributes(
-        &mut self,
-        already_parsed_attrs: Option<AttrWrapper>,
-    ) -> PResult<'a, AttrWrapper> {
-        if let Some(attrs) = already_parsed_attrs {
-            Ok(attrs)
-        } else {
-            self.parse_outer_attributes()
-        }
-    }
-
     /// Parses a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
@@ -1603,7 +1588,7 @@ pub(crate) fn make_unclosed_delims_error(
     if let Some(sp) = unmatched.unclosed_span {
         spans.push(sp);
     };
-    let err = psess.dcx.create_err(MismatchedClosingDelimiter {
+    let err = psess.dcx().create_err(MismatchedClosingDelimiter {
         spans,
         delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
         unmatched: unmatched.found_span,
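
A standalone sketch of the `expect_any_with_type` reshaping above (a hypothetical minimal `Parser` and `TokenKind`, not rustc's): instead of a `TokenExpectType` flag, the closing tokens are split into a slice checked with `check` (and therefore recorded among the expected tokens for "expected one of ..." errors) and a slice checked with `check_noexpect`.

```rust
#[derive(PartialEq, Clone, Copy, Debug)]
enum TokenKind {
    CloseParen,
    OrOr,
}

struct Parser {
    token: TokenKind,
    expected_tokens: Vec<TokenKind>,
}

impl Parser {
    fn check(&mut self, k: &TokenKind) -> bool {
        self.expected_tokens.push(*k); // remembered for error messages
        self.token == *k
    }
    fn check_noexpect(&self, k: &TokenKind) -> bool {
        self.token == *k
    }
    fn expect_any_with_type(
        &mut self,
        kets_expected: &[&TokenKind],
        kets_not_expected: &[&TokenKind],
    ) -> bool {
        kets_expected.iter().any(|k| self.check(k))
            || kets_not_expected.iter().any(|k| self.check_noexpect(k))
    }
}

fn main() {
    let mut p = Parser { token: TokenKind::OrOr, expected_tokens: vec![] };
    // `||` terminates the sequence but should not appear in error messages,
    // so it goes in the "not expected" slice.
    assert!(p.expect_any_with_type(&[&TokenKind::CloseParen], &[&TokenKind::OrOr]));
    assert_eq!(p.expected_tokens, vec![TokenKind::CloseParen]);
}
```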
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 8af415f7c9d..6f2b7177159 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -10,7 +10,7 @@ use crate::errors::{
     UnexpectedParenInRangePatSugg, UnexpectedVertVertBeforeFunctionParam,
     UnexpectedVertVertInPattern,
 };
-use crate::parser::expr::could_be_unclosed_char_literal;
+use crate::parser::expr::{could_be_unclosed_char_literal, LhsExpr};
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
 use rustc_ast::ptr::P;
@@ -398,9 +398,8 @@ impl<'a> Parser<'a> {
 
             // Parse an associative expression such as `+ expr`, `% expr`, ...
             // Assignements, ranges and `|` are disabled by [`Restrictions::IS_PAT`].
-            if let Ok(expr) =
-                snapshot.parse_expr_assoc_with(0, expr.into()).map_err(|err| err.cancel())
-            {
+            let lhs = LhsExpr::Parsed { expr, starts_statement: false };
+            if let Ok(expr) = snapshot.parse_expr_assoc_with(0, lhs).map_err(|err| err.cancel()) {
                 // We got a valid expression.
                 self.restore_snapshot(snapshot);
                 self.restrictions.remove(Restrictions::IS_PAT);
@@ -851,7 +850,7 @@ impl<'a> Parser<'a> {
 
         let sp = self.psess.source_map().start_point(self.token.span);
         if let Some(sp) = self.psess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            err.subdiagnostic(self.dcx(), ExprParenthesesNeeded::surrounding(*sp));
+            err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
         }
 
         Err(err)
@@ -940,7 +939,8 @@ impl<'a> Parser<'a> {
             || self.look_ahead(dist, |t| {
                 t.is_path_start() // e.g. `MY_CONST`;
                 || t.kind == token::Dot // e.g. `.5` for recovery;
-                || t.can_begin_literal_maybe_minus() // e.g. `42`.
+                || matches!(t.kind, token::Literal(..) | token::BinOp(token::Minus))
+                || t.is_bool_lit()
                 || t.is_whole_expr()
                 || t.is_lifetime() // recover `'a` instead of `'a'`
                 || (self.may_recover() // recover leading `(`
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 9beecd9849f..da8d1194325 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -920,7 +920,8 @@ impl<'a> Parser<'a> {
             // Fall back by trying to parse a const-expr expression. If we successfully do so,
             // then we should report an error that it needs to be wrapped in braces.
             let snapshot = self.create_snapshot_for_diagnostic();
-            match self.parse_expr_res(Restrictions::CONST_EXPR, None) {
+            let attrs = self.parse_outer_attributes()?;
+            match self.parse_expr_res(Restrictions::CONST_EXPR, attrs) {
                 Ok(expr) => {
                     return Ok(Some(self.dummy_const_arg_needs_braces(
                         self.dcx().struct_span_err(expr.span, "invalid const generic expression"),
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 104aae9b257..d65f6ff68ee 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -126,9 +126,9 @@ impl<'a> Parser<'a> {
             // Remainder are line-expr stmts.
             let e = match force_collect {
                 ForceCollect::Yes => self.collect_tokens_no_attrs(|this| {
-                    this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
+                    this.parse_expr_res(Restrictions::STMT_EXPR, attrs)
                 })?,
-                ForceCollect::No => self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))?,
+                ForceCollect::No => self.parse_expr_res(Restrictions::STMT_EXPR, attrs)?,
             };
             if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
                 let bl = self.parse_block()?;
@@ -174,10 +174,7 @@ impl<'a> Parser<'a> {
             // Perform this outside of the `collect_tokens_trailing_token` closure,
             // since our outer attributes do not apply to this part of the expression
             let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
-                this.parse_expr_assoc_with(
-                    0,
-                    LhsExpr::AlreadyParsed { expr, starts_statement: true },
-                )
+                this.parse_expr_assoc_with(0, LhsExpr::Parsed { expr, starts_statement: true })
             })?;
             Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
         } else {
@@ -210,10 +207,8 @@ impl<'a> Parser<'a> {
             let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
             let e = self.maybe_recover_from_bad_qpath(e)?;
             let e = self.parse_expr_dot_or_call_with(e, lo, attrs)?;
-            let e = self.parse_expr_assoc_with(
-                0,
-                LhsExpr::AlreadyParsed { expr: e, starts_statement: false },
-            )?;
+            let e = self
+                .parse_expr_assoc_with(0, LhsExpr::Parsed { expr: e, starts_statement: false })?;
             StmtKind::Expr(e)
         };
         Ok(self.mk_stmt(lo.to(hi), kind))
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index 79a6cf1b541..3a4690670af 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -61,7 +61,7 @@ where
 {
     let mut p = string_to_parser(&psess, s);
     let x = f(&mut p).unwrap();
-    p.psess.dcx.abort_if_errors();
+    p.dcx().abort_if_errors();
     x
 }
 
@@ -193,7 +193,7 @@ impl<T: Write> Write for Shared<T> {
 #[allow(rustc::untranslatable_diagnostic)] // no translation needed for tests
 fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
     create_default_session_globals_then(|| {
-        let (handler, source_map, output) = create_test_handler();
+        let (dcx, source_map, output) = create_test_handler();
         source_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
 
         let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
@@ -205,7 +205,7 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
             println!("text: {:?}", source_map.span_to_snippet(span));
         }
 
-        handler.span_err(msp, "foo");
+        dcx.handle().span_err(msp, "foo");
 
         assert!(
             expected_output.chars().next() == Some('\n'),
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 5bed0317e5e..fcd623b477f 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -9,7 +9,7 @@ use crate::errors::{
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::token::{self, BinOpToken, Delimiter, Token, TokenKind};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, FnRetTy, GenericBound,
@@ -316,7 +316,7 @@ impl<'a> Parser<'a> {
                             TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
                         }
                         (TyKind::TraitObject(bounds, _), kw::Impl) => {
-                            TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds, None)
+                            TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
                         }
                         _ => return Err(err),
                     };
@@ -670,33 +670,26 @@ impl<'a> Parser<'a> {
             })
         }
 
-        // parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of
-        // lifetimes and ident params (including SelfUpper). These are validated later
-        // for order, duplication, and whether they actually reference params.
-        let precise_capturing = if self.eat_keyword(kw::Use) {
-            let use_span = self.prev_token.span;
-            self.psess.gated_spans.gate(sym::precise_capturing, use_span);
-            let (args, args_span) = self.parse_precise_capturing_args()?;
-            Some(P((args, use_span.to(args_span))))
-        } else {
-            None
-        };
-
         // Always parse bounds greedily for better error recovery.
         let bounds = self.parse_generic_bounds()?;
 
         *impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
 
-        Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds, precise_capturing))
+        Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
     }
 
     fn parse_precise_capturing_args(
         &mut self,
     ) -> PResult<'a, (ThinVec<PreciseCapturingArg>, Span)> {
         let lo = self.token.span;
-        let (args, _) = self.parse_unspanned_seq(
-            &TokenKind::Lt,
-            &TokenKind::Gt,
+        self.expect_lt()?;
+        let (args, _, _) = self.parse_seq_to_before_tokens(
+            &[&TokenKind::Gt],
+            &[
+                &TokenKind::Ge,
+                &TokenKind::BinOp(BinOpToken::Shr),
+                &TokenKind::BinOpEq(BinOpToken::Shr),
+            ],
             SeqSep::trailing_allowed(token::Comma),
             |self_| {
                 if self_.check_keyword(kw::SelfUpper) {
@@ -717,6 +710,7 @@ impl<'a> Parser<'a> {
                 }
             },
         )?;
+        self.expect_gt()?;
         Ok((args, lo.to(self.prev_token.span)))
     }
 
@@ -828,6 +822,7 @@ impl<'a> Parser<'a> {
             || self.check(&token::OpenDelim(Delimiter::Parenthesis))
             || self.check_keyword(kw::Const)
             || self.check_keyword(kw::Async)
+            || self.check_keyword(kw::Use)
     }
 
     /// Parses a bound according to the grammar:
@@ -844,6 +839,14 @@ impl<'a> Parser<'a> {
         let bound = if self.token.is_lifetime() {
             self.error_lt_bound_with_modifiers(modifiers);
             self.parse_generic_lt_bound(lo, inner_lo, has_parens)?
+        } else if self.eat_keyword(kw::Use) {
+            // parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of
+            // lifetimes and ident params (including SelfUpper). These are validated later
+            // for order, duplication, and whether they actually reference params.
+            let use_span = self.prev_token.span;
+            self.psess.gated_spans.gate(sym::precise_capturing, use_span);
+            let (args, args_span) = self.parse_precise_capturing_args()?;
+            GenericBound::Use(args, use_span.to(args_span))
         } else {
             self.parse_generic_ty_bound(lo, has_parens, modifiers, &leading_token)?
         };
@@ -1003,7 +1006,7 @@ impl<'a> Parser<'a> {
                             Applicability::MaybeIncorrect,
                         )
                     }
-                    TyKind::ImplTrait(_, bounds, None)
+                    TyKind::ImplTrait(_, bounds)
                         if let [GenericBound::Trait(tr, ..), ..] = bounds.as_slice() =>
                     {
                         (
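
With this change, the `use<...>` list is parsed in bound position (`GenericBound::Use`) rather than as a trailing part of `TyKind::ImplTrait`, and it stays gated on `precise_capturing`. A sketch of the surface syntax this accepts, assuming a toolchain where the feature is available (at the time of this diff, nightly behind `#![feature(precise_capturing)]`):

```rust
use std::fmt::Display;

// The `+ use<'a>` bound spells out exactly which generic parameters the
// opaque return type captures; here only the lifetime `'a`.
fn suffix<'a>(s: &'a str, start: usize) -> impl Display + use<'a> {
    &s[start..]
}

fn main() {
    let owned = String::from("precise capturing");
    println!("{}", suffix(&owned, 8));
}
```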
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index 19d6f512572..4ca52146039 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -65,7 +65,7 @@ pub fn parse_meta<'a>(psess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Met
                     let res = match res {
                         Ok(lit) => {
                             if token_lit.suffix.is_some() {
-                                let mut err = psess.dcx.struct_span_err(
+                                let mut err = psess.dcx().struct_span_err(
                                     expr.span,
                                     "suffixed literals are not allowed in attributes",
                                 );
@@ -98,7 +98,7 @@ pub fn parse_meta<'a>(psess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Met
                     //   the error because an earlier error will have already
                     //   been reported.
                     let msg = "attribute value must be a literal";
-                    let mut err = psess.dcx.struct_span_err(expr.span, msg);
+                    let mut err = psess.dcx().struct_span_err(expr.span, msg);
                     if let ast::ExprKind::Err(_) = expr.kind {
                         err.downgrade_to_delayed_bug();
                     }
@@ -114,7 +114,7 @@ fn check_meta_bad_delim(psess: &ParseSess, span: DelimSpan, delim: Delimiter) {
     if let Delimiter::Parenthesis = delim {
         return;
     }
-    psess.dcx.emit_err(errors::MetaBadDelim {
+    psess.dcx().emit_err(errors::MetaBadDelim {
         span: span.entire(),
         sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
     });
@@ -124,7 +124,7 @@ pub(super) fn check_cfg_attr_bad_delim(psess: &ParseSess, span: DelimSpan, delim
     if let Delimiter::Parenthesis = delim {
         return;
     }
-    psess.dcx.emit_err(errors::CfgAttrBadDelim {
+    psess.dcx().emit_err(errors::CfgAttrBadDelim {
         span: span.entire(),
         sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
     });
@@ -191,7 +191,7 @@ fn emit_malformed_attribute(
     } else {
         suggestions.sort();
         psess
-            .dcx
+            .dcx()
             .struct_span_err(span, error_msg)
             .with_span_suggestions(
                 span,