Diffstat (limited to 'compiler/rustc_parse/src')
 compiler/rustc_parse/src/errors.rs              |  70
 compiler/rustc_parse/src/parser/attr.rs         |   6
 compiler/rustc_parse/src/parser/attr_wrapper.rs | 162
 compiler/rustc_parse/src/parser/diagnostics.rs  |  58
 compiler/rustc_parse/src/parser/expr.rs         |  81
 compiler/rustc_parse/src/parser/generics.rs     |   2
 compiler/rustc_parse/src/parser/item.rs         | 118
 compiler/rustc_parse/src/parser/mod.rs          |  26
 compiler/rustc_parse/src/parser/nonterminal.rs  |  29
 compiler/rustc_parse/src/parser/pat.rs          |   3
 compiler/rustc_parse/src/parser/path.rs         |  27
 compiler/rustc_parse/src/parser/tests.rs        |  17
 compiler/rustc_parse/src/parser/ty.rs           | 100
 compiler/rustc_parse/src/validate_attr.rs       |  75
 14 files changed, 457 insertions(+), 317 deletions(-)
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 46e15734853..3ae9b6dad99 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -1612,28 +1612,44 @@ pub(crate) struct DefaultNotFollowedByItem {
 
 #[derive(Diagnostic)]
 pub(crate) enum MissingKeywordForItemDefinition {
+    #[diag(parse_missing_enum_for_enum_definition)]
+    Enum {
+        #[primary_span]
+        span: Span,
+        #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = "enum ")]
+        insert_span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_missing_enum_or_struct_for_item_definition)]
+    EnumOrStruct {
+        #[primary_span]
+        span: Span,
+    },
     #[diag(parse_missing_struct_for_struct_definition)]
     Struct {
         #[primary_span]
-        #[suggestion(style = "short", applicability = "maybe-incorrect", code = " struct ")]
         span: Span,
+        #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = "struct ")]
+        insert_span: Span,
         ident: Ident,
     },
     #[diag(parse_missing_fn_for_function_definition)]
     Function {
         #[primary_span]
-        #[suggestion(style = "short", applicability = "maybe-incorrect", code = " fn ")]
         span: Span,
+        #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = "fn ")]
+        insert_span: Span,
         ident: Ident,
     },
     #[diag(parse_missing_fn_for_method_definition)]
     Method {
         #[primary_span]
-        #[suggestion(style = "short", applicability = "maybe-incorrect", code = " fn ")]
         span: Span,
+        #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = "fn ")]
+        insert_span: Span,
         ident: Ident,
     },
-    #[diag(parse_ambiguous_missing_keyword_for_item_definition)]
+    #[diag(parse_missing_fn_or_struct_for_item_definition)]
     Ambiguous {
         #[primary_span]
         span: Span,
@@ -1644,7 +1660,12 @@ pub(crate) enum MissingKeywordForItemDefinition {
 
 #[derive(Subdiagnostic)]
 pub(crate) enum AmbiguousMissingKwForItemSub {
-    #[suggestion(parse_suggestion, applicability = "maybe-incorrect", code = "{snippet}!")]
+    #[suggestion(
+        parse_suggestion,
+        style = "verbose",
+        applicability = "maybe-incorrect",
+        code = "{snippet}!"
+    )]
     SuggestMacro {
         #[primary_span]
         span: Span,
@@ -2568,14 +2589,6 @@ pub(crate) struct BadReturnTypeNotationOutput {
 }
 
 #[derive(Diagnostic)]
-#[diag(parse_bad_return_type_notation_dotdot)]
-pub(crate) struct BadReturnTypeNotationDotDot {
-    #[primary_span]
-    #[suggestion(code = "", applicability = "maybe-incorrect")]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
 #[diag(parse_bad_assoc_type_bounds)]
 pub(crate) struct BadAssocTypeBounds {
     #[primary_span]
@@ -2997,3 +3010,34 @@ pub(crate) struct DotDotRangeAttribute {
     #[primary_span]
     pub span: Span,
 }
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_attr_unsafe)]
+#[note]
+pub struct InvalidAttrUnsafe {
+    #[primary_span]
+    pub span: Span,
+    pub name: Path,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unsafe_attr_outside_unsafe)]
+pub struct UnsafeAttrOutsideUnsafe {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub suggestion: UnsafeAttrOutsideUnsafeSuggestion,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_unsafe_attr_outside_unsafe_suggestion,
+    applicability = "machine-applicable"
+)]
+pub struct UnsafeAttrOutsideUnsafeSuggestion {
+    #[suggestion_part(code = "unsafe(")]
+    pub left: Span,
+    #[suggestion_part(code = ")")]
+    pub right: Span,
+}
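
For orientation, a sketch of the source pattern the new `UnsafeAttrOutsideUnsafe`
diagnostic and its machine-applicable suggestion target. The choice of
`no_mangle` is an assumption for illustration; the diagnostic itself is
attribute-agnostic:

    // Hypothetical input: an attribute that must be wrapped in `unsafe(...)`
    // is written bare, so `UnsafeAttrOutsideUnsafe` fires on its span.
    #[no_mangle]
    pub extern "C" fn f() {}

    // After applying `UnsafeAttrOutsideUnsafeSuggestion`, which inserts
    // "unsafe(" at `left` and ")" at `right`:
    #[unsafe(no_mangle)]
    pub extern "C" fn f() {}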
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 58fef9b6c45..a8fe35f45b3 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -282,7 +282,7 @@ impl<'a> Parser<'a> {
     pub fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
         let mut attrs = ast::AttrVec::new();
         loop {
-            let start_pos: u32 = self.num_bump_calls.try_into().unwrap();
+            let start_pos = self.num_bump_calls;
             // Only try to parse if it is an inner attribute (has `!`).
             let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
                 Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
@@ -303,7 +303,7 @@ impl<'a> Parser<'a> {
                 None
             };
             if let Some(attr) = attr {
-                let end_pos: u32 = self.num_bump_calls.try_into().unwrap();
+                let end_pos = self.num_bump_calls;
                 // If we are currently capturing tokens, mark the location of this inner attribute.
                 // If capturing ends up creating a `LazyAttrTokenStream`, we will include
                 // this replace range with it, removing the inner attribute from the final
@@ -313,7 +313,7 @@ impl<'a> Parser<'a> {
                 // corresponding macro).
                 let range = start_pos..end_pos;
                 if let Capturing::Yes = self.capture_state.capturing {
-                    self.capture_state.inner_attr_ranges.insert(attr.id, (range, vec![]));
+                    self.capture_state.inner_attr_ranges.insert(attr.id, (range, None));
                 }
                 attrs.push(attr);
             } else {
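
To make the bookkeeping above concrete (a sketch; the module name is made up):
while capturing tokens for the enclosing item, parsing the inner attribute
below records an `inner_attr_ranges` entry keyed by the attribute's id, whose
`start_pos..end_pos` bump-call range is now paired with `None` rather than an
empty token vector:

    mod m {
        // If the enclosing item's tokens are captured into a
        // `LazyAttrTokenStream`, the recorded range lets this inner attribute
        // be spliced out of the final token stream (it lives on the AST node).
        #![allow(dead_code)]

        fn f() {}
    }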
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index b5480b6b7d2..38f18022e3c 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,6 +1,6 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing};
 use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
@@ -8,7 +8,7 @@ use rustc_errors::PResult;
 use rustc_session::parse::ParseSess;
 use rustc_span::{sym, Span, DUMMY_SP};
 
-use std::ops::Range;
+use std::{iter, mem};
 
 /// A wrapper type to ensure that the parser handles outer attributes correctly.
 /// When we parse outer attributes, we need to ensure that we capture tokens
@@ -29,15 +29,15 @@ pub struct AttrWrapper {
     // The start of the outer attributes in the token cursor.
     // This allows us to create a `ReplaceRange` for the entire attribute
     // target, including outer attributes.
-    start_pos: usize,
+    start_pos: u32,
 }
 
 impl AttrWrapper {
-    pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
+    pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
         AttrWrapper { attrs, start_pos }
     }
     pub fn empty() -> AttrWrapper {
-        AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
+        AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
     }
 
     pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
@@ -53,7 +53,7 @@ impl AttrWrapper {
     // FIXME: require passing an NT to prevent misuse of this method
     pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) {
         let mut self_attrs = self.attrs;
-        std::mem::swap(attrs, &mut self_attrs);
+        mem::swap(attrs, &mut self_attrs);
         attrs.extend(self_attrs);
     }
 
@@ -87,11 +87,10 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 //
 // This also makes `Parser` very cheap to clone, since
 // there is no intermediate collection buffer to clone.
-#[derive(Clone)]
 struct LazyAttrTokenStreamImpl {
     start_token: (Token, Spacing),
     cursor_snapshot: TokenCursor,
-    num_calls: usize,
+    num_calls: u32,
     break_last_token: bool,
     replace_ranges: Box<[ReplaceRange]>,
 }
@@ -104,15 +103,16 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens =
-            std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
-                .chain(std::iter::repeat_with(|| {
-                    let token = cursor_snapshot.next();
-                    (FlatToken::Token(token.0), token.1)
-                }))
-                .take(self.num_calls);
-
-        if !self.replace_ranges.is_empty() {
+        let tokens = iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
+            .chain(iter::repeat_with(|| {
+                let token = cursor_snapshot.next();
+                (FlatToken::Token(token.0), token.1)
+            }))
+            .take(self.num_calls as usize);
+
+        if self.replace_ranges.is_empty() {
+            make_attr_token_stream(tokens, self.break_last_token)
+        } else {
             let mut tokens: Vec<_> = tokens.collect();
             let mut replace_ranges = self.replace_ranges.to_vec();
             replace_ranges.sort_by_key(|(range, _)| range.start);
@@ -144,29 +144,26 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
             // start position, we ensure that any replace range which encloses
             // another replace range will capture the *replaced* tokens for the inner
             // range, not the original tokens.
-            for (range, new_tokens) in replace_ranges.into_iter().rev() {
+            for (range, target) in replace_ranges.into_iter().rev() {
                 assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
-                // Replace ranges are only allowed to decrease the number of tokens.
-                assert!(
-                    range.len() >= new_tokens.len(),
-                    "Range {range:?} has greater len than {new_tokens:?}"
-                );
-
-                // Replace any removed tokens with `FlatToken::Empty`.
-                // This keeps the total length of `tokens` constant throughout the
-                // replacement process, allowing us to use all of the `ReplaceRanges` entries
-                // without adjusting indices.
-                let filler = std::iter::repeat((FlatToken::Empty, Spacing::Alone))
-                    .take(range.len() - new_tokens.len());
 
+                // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
+                // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
+                // total length of `tokens` constant throughout the replacement process, allowing
+                // us to use all of the `ReplaceRanges` entries without adjusting indices.
+                let target_len = target.is_some() as usize;
                 tokens.splice(
                     (range.start as usize)..(range.end as usize),
-                    new_tokens.into_iter().chain(filler),
+                    target
+                        .into_iter()
+                        .map(|target| (FlatToken::AttrsTarget(target), Spacing::Alone))
+                        .chain(
+                            iter::repeat((FlatToken::Empty, Spacing::Alone))
+                                .take(range.len() - target_len),
+                        ),
                 );
             }
-            make_token_stream(tokens.into_iter(), self.break_last_token)
-        } else {
-            make_token_stream(tokens, self.break_last_token)
+            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
         }
     }
 }
@@ -218,24 +215,23 @@ impl<'a> Parser<'a> {
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = self.token_cursor.clone();
         let start_pos = self.num_bump_calls;
-
         let has_outer_attrs = !attrs.attrs.is_empty();
-        let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
         let replace_ranges_start = self.capture_state.replace_ranges.len();
 
-        let ret = f(self, attrs.attrs);
-
-        self.capture_state.capturing = prev_capturing;
-
-        let (mut ret, trailing) = ret?;
+        let (mut ret, trailing) = {
+            let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
+            let ret_and_trailing = f(self, attrs.attrs);
+            self.capture_state.capturing = prev_capturing;
+            ret_and_trailing?
+        };
 
         // When we're not in `capture-cfg` mode, then bail out early if:
         // 1. Our target doesn't support tokens at all (e.g we're parsing an `NtIdent`)
         //    so there's nothing for us to do.
         // 2. Our target already has tokens set (e.g. we've parsed something
-        // like `#[my_attr] $item`. The actual parsing code takes care of prepending
-        // any attributes to the nonterminal, so we don't need to modify the
-        // already captured tokens.
+        //    like `#[my_attr] $item`). The actual parsing code takes care of
+        //    prepending any attributes to the nonterminal, so we don't need to
+        //    modify the already captured tokens.
         // Note that this check is independent of `force_collect`: if we already
         // have tokens, or can't even store them, then there's never a need to
         // force collection of new tokens.
@@ -276,37 +272,32 @@ impl<'a> Parser<'a> {
 
         let replace_ranges_end = self.capture_state.replace_ranges.len();
 
-        let mut end_pos = self.num_bump_calls;
-
-        let mut captured_trailing = false;
-
         // Capture a trailing token if requested by the callback 'f'
-        match trailing {
-            TrailingToken::None => {}
+        let captured_trailing = match trailing {
+            TrailingToken::None => false,
             TrailingToken::Gt => {
                 assert_eq!(self.token.kind, token::Gt);
+                false
             }
             TrailingToken::Semi => {
                 assert_eq!(self.token.kind, token::Semi);
-                end_pos += 1;
-                captured_trailing = true;
+                true
             }
-            TrailingToken::MaybeComma => {
-                if self.token.kind == token::Comma {
-                    end_pos += 1;
-                    captured_trailing = true;
-                }
-            }
-        }
+            TrailingToken::MaybeComma => self.token.kind == token::Comma,
+        };
 
-        // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
-        // then extend the range of captured tokens to include it, since the parser
-        // was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
-        // into an `AttrTokenStream`, we will create the proper token.
-        if self.break_last_token {
-            assert!(!captured_trailing, "Cannot set break_last_token and have trailing token");
-            end_pos += 1;
-        }
+        assert!(
+            !(self.break_last_token && captured_trailing),
+            "Cannot set break_last_token and have trailing token"
+        );
+
+        let end_pos = self.num_bump_calls
+            + captured_trailing as u32
+            // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens), then
+            // extend the range of captured tokens to include it, since the parser was not actually
+            // bumped past it. When the `LazyAttrTokenStream` gets converted into an
+            // `AttrTokenStream`, we will create the proper token.
+            + self.break_last_token as u32;
 
         let num_calls = end_pos - start_pos;
 
@@ -318,14 +309,11 @@ impl<'a> Parser<'a> {
             // Grab any replace ranges that occur *inside* the current AST node.
             // We will perform the actual replacement when we convert the `LazyAttrTokenStream`
             // to an `AttrTokenStream`.
-            let start_calls: u32 = start_pos.try_into().unwrap();
             self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                 .iter()
                 .cloned()
                 .chain(inner_attr_replace_ranges.iter().cloned())
-                .map(|(range, tokens)| {
-                    ((range.start - start_calls)..(range.end - start_calls), tokens)
-                })
+                .map(|(range, data)| ((range.start - start_pos)..(range.end - start_pos), data))
                 .collect()
         };
 
@@ -340,7 +328,7 @@ impl<'a> Parser<'a> {
         // If we support tokens at all
         if let Some(target_tokens) = ret.tokens_mut() {
             if target_tokens.is_none() {
-                // Store se our newly captured tokens into the AST node
+                // Store our newly captured tokens into the AST node.
                 *target_tokens = Some(tokens.clone());
             }
         }
@@ -355,18 +343,14 @@ impl<'a> Parser<'a> {
             && matches!(self.capture_state.capturing, Capturing::Yes)
             && has_cfg_or_cfg_attr(final_attrs)
         {
-            let attr_data = AttributesData { attrs: final_attrs.iter().cloned().collect(), tokens };
+            assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
 
-            // Replace the entire AST node that we just parsed, including attributes,
-            // with a `FlatToken::AttrTarget`. If this AST node is inside an item
-            // that has `#[derive]`, then this will allow us to cfg-expand this
-            // AST node.
+            // Replace the entire AST node that we just parsed, including attributes, with
+            // `target`. If this AST node is inside an item that has `#[derive]`, then this will
+            // allow us to cfg-expand this AST node.
             let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
-            let new_tokens = vec![(FlatToken::AttrTarget(attr_data), Spacing::Alone)];
-
-            assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
-            let range: Range<u32> = (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap());
-            self.capture_state.replace_ranges.push((range, new_tokens));
+            let target = AttrsTarget { attrs: final_attrs.iter().cloned().collect(), tokens };
+            self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target)));
             self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
         }
 
@@ -382,10 +366,10 @@ impl<'a> Parser<'a> {
     }
 }
 
-/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
-/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
-/// of open and close delims.
-fn make_token_stream(
+/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
+/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
+/// close delims.
+fn make_attr_token_stream(
     mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
     break_last_token: bool,
 ) -> AttrTokenStream {
@@ -428,11 +412,11 @@ fn make_token_stream(
                 .expect("Bottom token frame is missing!")
                 .inner
                 .push(AttrTokenTree::Token(token, spacing)),
-            FlatToken::AttrTarget(data) => stack
+            FlatToken::AttrsTarget(target) => stack
                 .last_mut()
                 .expect("Bottom token frame is missing!")
                 .inner
-                .push(AttrTokenTree::Attributes(data)),
+                .push(AttrTokenTree::AttrsTarget(target)),
             FlatToken::Empty => {}
         }
         token_and_spacing = iter.next();
@@ -464,6 +448,6 @@ mod size_asserts {
     use rustc_data_structures::static_assert_size;
     // tidy-alphabetical-start
     static_assert_size!(AttrWrapper, 16);
-    static_assert_size!(LazyAttrTokenStreamImpl, 104);
+    static_assert_size!(LazyAttrTokenStreamImpl, 96);
     // tidy-alphabetical-end
 }
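
A worked illustration of the splicing step in `to_attr_token_stream`, with toy
values assumed for the buffer and range: replacing a range with at most one
`AttrsTarget` plus `Empty` filler keeps the buffer length constant, so the
indices stored in the remaining `ReplaceRanges` entries stay valid:

    // range 2..6 replaced with Some(target):
    // before: [t0, t1, t2, t3, t4, t5, t6, t7]
    // after:  [t0, t1, AttrsTarget, Empty, Empty, Empty, t6, t7]
    //
    // With `None` (an inner attribute being removed), the whole range becomes
    // `Empty` filler. `FlatToken::Empty` entries are skipped later by
    // `make_attr_token_stream`, so they never reach the output stream.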
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index c2b91488a11..81d5f0fca0e 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -2965,9 +2965,10 @@ impl<'a> Parser<'a> {
 
     /// This checks if this is a conflict marker, depending on the parameter passed.
     ///
-    /// * `>>>>>`
-    /// * `=====`
-    /// * `<<<<<`
+    /// * `<<<<<<<`
+    /// * `|||||||`
+    /// * `=======`
+    /// * `>>>>>>>`
     ///
     pub(super) fn is_vcs_conflict_marker(
         &mut self,
@@ -2997,14 +2998,18 @@ impl<'a> Parser<'a> {
     }
 
     pub(crate) fn err_vcs_conflict_marker(&mut self) -> PResult<'a, ()> {
+        // <<<<<<<
         let Some(start) = self.conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt)
         else {
             return Ok(());
         };
         let mut spans = Vec::with_capacity(3);
         spans.push(start);
+        // |||||||
         let mut middlediff3 = None;
+        // =======
         let mut middle = None;
+        // >>>>>>>
         let mut end = None;
         loop {
             if self.token.kind == TokenKind::Eof {
@@ -3025,29 +3030,50 @@ impl<'a> Parser<'a> {
             }
             self.bump();
         }
+
         let mut err = self.dcx().struct_span_err(spans, "encountered diff marker");
-        err.span_label(start, "after this is the code before the merge");
-        if let Some(middle) = middlediff3 {
-            err.span_label(middle, "");
-        }
+        match middlediff3 {
+            // We're using diff3
+            Some(middlediff3) => {
+                err.span_label(
+                    start,
+                    "between this marker and `|||||||` is the code that we're merging into",
+                );
+                err.span_label(middlediff3, "between this marker and `=======` is the base code (what the two refs diverged from)");
+            }
+            None => {
+                err.span_label(
+                    start,
+                    "between this marker and `=======` is the code that we're merging into",
+                );
+            }
+        };
+
         if let Some(middle) = middle {
-            err.span_label(middle, "");
+            err.span_label(middle, "between this marker and `>>>>>>>` is the incoming code");
         }
         if let Some(end) = end {
-            err.span_label(end, "above this are the incoming code changes");
+            err.span_label(end, "this marker concludes the conflict region");
         }
-        err.help(
-            "if you're having merge conflicts after pulling new code, the top section is the code \
-             you already had and the bottom section is the remote code",
+        err.note(
+            "conflict markers indicate that a merge was started but could not be completed due \
+             to merge conflicts\n\
+             to resolve a conflict, keep only the code you want and then delete the lines \
+             containing conflict markers",
         );
         err.help(
-            "if you're in the middle of a rebase, the top section is the code being rebased onto \
-             and the bottom section is the code coming from the current commit being rebased",
+            "if you're having merge conflicts after pulling new code:\n\
+             the top section is the code you already had and the bottom section is the remote code\n\
+             if you're in the middle of a rebase:\n\
+             the top section is the code being rebased onto and the bottom section is the code \
+             coming from the current commit being rebased",
         );
+
         err.note(
-            "for an explanation on these markers from the `git` documentation, visit \
-             <https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>",
+            "for an explanation on these markers from the `git` documentation:\n\
+             visit <https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>",
         );
+
         Err(err)
     }
 
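For reference, the shape of a diff3-style conflict region that the new labels
describe; the ref names `HEAD`, `base`, and `feature` are placeholders:

    <<<<<<< HEAD
        let x = 1;   // between `<<<<<<<` and `|||||||`: code being merged into
    ||||||| base
        let x = 0;   // between `|||||||` and `=======`: the common ancestor
    =======
        let x = 2;   // between `=======` and `>>>>>>>`: the incoming code
    >>>>>>> feature

Without diff3 (`middlediff3` is `None`), the `|||||||` section is absent and
the first label points from `<<<<<<<` straight to `=======`.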
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 09f706143fa..b2df9a14eb0 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1,4 +1,5 @@
 // ignore-tidy-filelength
+
 use super::diagnostics::SnapshotParser;
 use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
@@ -38,36 +39,6 @@ use rustc_span::{BytePos, ErrorGuaranteed, Pos, Span};
 use thin_vec::{thin_vec, ThinVec};
 use tracing::instrument;
 
-/// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression
-/// dropped into the token stream, which happens while parsing the result of
-/// macro expansion). Placement of these is not as complex as I feared it would
-/// be. The important thing is to make sure that lookahead doesn't balk at
-/// `token::Interpolated` tokens.
-macro_rules! maybe_whole_expr {
-    ($p:expr) => {
-        if let token::Interpolated(nt) = &$p.token.kind {
-            match &**nt {
-                token::NtExpr(e) | token::NtLiteral(e) => {
-                    let e = e.clone();
-                    $p.bump();
-                    return Ok(e);
-                }
-                token::NtPath(path) => {
-                    let path = (**path).clone();
-                    $p.bump();
-                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Path(None, path)));
-                }
-                token::NtBlock(block) => {
-                    let block = block.clone();
-                    $p.bump();
-                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Block(block, None)));
-                }
-                _ => {}
-            };
-        }
-    };
-}
-
 #[derive(Debug)]
 pub(super) enum LhsExpr {
     // Already parsed just the outer attributes.
@@ -1420,7 +1391,27 @@ impl<'a> Parser<'a> {
     /// correctly if called from `parse_dot_or_call_expr()`.
     fn parse_expr_bottom(&mut self) -> PResult<'a, P<Expr>> {
         maybe_recover_from_interpolated_ty_qpath!(self, true);
-        maybe_whole_expr!(self);
+
+        if let token::Interpolated(nt) = &self.token.kind {
+            match &**nt {
+                token::NtExpr(e) | token::NtLiteral(e) => {
+                    let e = e.clone();
+                    self.bump();
+                    return Ok(e);
+                }
+                token::NtPath(path) => {
+                    let path = (**path).clone();
+                    self.bump();
+                    return Ok(self.mk_expr(self.prev_token.span, ExprKind::Path(None, path)));
+                }
+                token::NtBlock(block) => {
+                    let block = block.clone();
+                    self.bump();
+                    return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None)));
+                }
+                _ => {}
+            };
+        }
 
         // Outer attributes are already parsed and will be
         // added to the return value after the fact.
@@ -2189,7 +2180,26 @@ impl<'a> Parser<'a> {
     /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
     /// Keep this in sync with `Token::can_begin_literal_maybe_minus`.
     pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
-        maybe_whole_expr!(self);
+        if let token::Interpolated(nt) = &self.token.kind {
+            match &**nt {
+                // FIXME(nnethercote) The `NtExpr` case should only match if
+                // `e` is an `ExprKind::Lit` or an `ExprKind::Unary` containing
+                // an `UnOp::Neg` and an `ExprKind::Lit`, like how
+                // `can_begin_literal_maybe_minus` works. But this method has
+                // been over-accepting for a long time, and to make that change
+                // here requires also changing some `parse_literal_maybe_minus`
+                // call sites to accept additional expression kinds. E.g.
+                // `ExprKind::Path` must be accepted when parsing range
+                // patterns. That requires some care. So for now, we continue
+                // being less strict here than we should be.
+                token::NtExpr(e) | token::NtLiteral(e) => {
+                    let e = e.clone();
+                    self.bump();
+                    return Ok(e);
+                }
+                _ => {}
+            };
+        }
 
         let lo = self.token.span;
         let minus_present = self.eat(&token::BinOp(token::Minus));
@@ -2317,7 +2327,7 @@ impl<'a> Parser<'a> {
         let before = self.prev_token.clone();
         let binder = if self.check_keyword(kw::For) {
             let lo = self.token.span;
-            let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+            let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
             let span = lo.to(self.prev_token.span);
 
             self.psess.gated_spans.gate(sym::closure_lifetime_binder, span);
@@ -3422,8 +3432,9 @@ impl<'a> Parser<'a> {
             }
         }
         let capture_clause = self.parse_capture_clause()?;
+        let decl_span = lo.to(self.prev_token.span);
         let (attrs, body) = self.parse_inner_attrs_and_block()?;
-        let kind = ExprKind::Gen(capture_clause, body, kind);
+        let kind = ExprKind::Gen(capture_clause, body, kind, decl_span);
         Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
     }
 
@@ -4012,7 +4023,7 @@ impl MutVisitor for CondChecker<'_> {
             | ExprKind::Match(_, _, _)
             | ExprKind::Closure(_)
             | ExprKind::Block(_, _)
-            | ExprKind::Gen(_, _, _)
+            | ExprKind::Gen(_, _, _, _)
             | ExprKind::TryBlock(_)
             | ExprKind::Underscore
             | ExprKind::Path(_, _)
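
A sketch of how an interpolated expression reaches the inlined `NtExpr` arm
above (the macro is hypothetical): an `$e:expr` fragment re-enters the token
stream as a single `token::Interpolated(NtExpr(..))` token, so
`parse_expr_bottom` returns the pre-parsed expression instead of re-parsing
its tokens:

    macro_rules! twice {
        ($e:expr) => {
            $e + $e // each use of `$e` is one interpolated `NtExpr` token
        };
    }

    fn main() {
        let n = twice!(2 * 3); // `2 * 3` is parsed once, reused twice
        assert_eq!(n, 12);
    }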
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index fde16ac957d..10c7715c7dc 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -457,7 +457,7 @@ impl<'a> Parser<'a> {
         // * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
         // * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
         // * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
-        let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+        let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
 
         // Parse type with mandatory colon and (possibly empty) bounds,
         // or with mandatory equality sign and the second type.
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 2db777a9f70..f31e634f55c 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -239,6 +239,7 @@ impl<'a> Parser<'a> {
                 self.recover_const_impl(const_span, attrs, def_())?
             } else {
                 self.recover_const_mut(const_span);
+                self.recover_missing_kw_before_item()?;
                 let (ident, generics, ty, expr) = self.parse_const_item()?;
                 (
                     ident,
@@ -311,6 +312,9 @@ impl<'a> Parser<'a> {
                 Case::Insensitive,
             );
         } else if macros_allowed && self.check_path() {
+            if self.isnt_macro_invocation() {
+                self.recover_missing_kw_before_item()?;
+            }
             // MACRO INVOCATION ITEM
             (Ident::empty(), ItemKind::MacCall(P(self.parse_item_macro(vis)?)))
         } else {
@@ -374,25 +378,25 @@ impl<'a> Parser<'a> {
         self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::PathSep)
     }
 
-    /// Recover on encountering a struct or method definition where the user
-    /// forgot to add the `struct` or `fn` keyword after writing `pub`: `pub S {}`.
+    /// Recover on encountering a struct, enum, or method definition where the user
+    /// forgot to add the `struct`, `enum`, or `fn` keyword
     fn recover_missing_kw_before_item(&mut self) -> PResult<'a, ()> {
-        // Space between `pub` keyword and the identifier
-        //
-        //     pub   S {}
-        //        ^^^ `sp` points here
-        let sp = self.prev_token.span.between(self.token.span);
-        let full_sp = self.prev_token.span.to(self.token.span);
-        let ident_sp = self.token.span;
-
-        let ident = if self.look_ahead(1, |t| {
-            [
-                token::Lt,
-                token::OpenDelim(Delimiter::Brace),
-                token::OpenDelim(Delimiter::Parenthesis),
-            ]
-            .contains(&t.kind)
-        }) {
+        let is_pub = self.prev_token.is_keyword(kw::Pub);
+        let is_const = self.prev_token.is_keyword(kw::Const);
+        let ident_span = self.token.span;
+        let span = if is_pub { self.prev_token.span.to(ident_span) } else { ident_span };
+        let insert_span = ident_span.shrink_to_lo();
+
+        let ident = if (!is_const
+            || self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis)))
+            && self.look_ahead(1, |t| {
+                [
+                    token::Lt,
+                    token::OpenDelim(Delimiter::Brace),
+                    token::OpenDelim(Delimiter::Parenthesis),
+                ]
+                .contains(&t.kind)
+            }) {
             self.parse_ident().unwrap()
         } else {
             return Ok(());
@@ -406,46 +410,56 @@ impl<'a> Parser<'a> {
         }
 
         let err = if self.check(&token::OpenDelim(Delimiter::Brace)) {
-            // possible public struct definition where `struct` was forgotten
-            Some(errors::MissingKeywordForItemDefinition::Struct { span: sp, ident })
+            // possible struct or enum definition where `struct` or `enum` was forgotten
+            if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Brace)) {
+                // `S {}` could be unit enum or struct
+                Some(errors::MissingKeywordForItemDefinition::EnumOrStruct { span })
+            } else if self.look_ahead(2, |t| *t == token::Colon)
+                || self.look_ahead(3, |t| *t == token::Colon)
+            {
+                // `S { f:` or `S { pub f:`
+                Some(errors::MissingKeywordForItemDefinition::Struct { span, insert_span, ident })
+            } else {
+                Some(errors::MissingKeywordForItemDefinition::Enum { span, insert_span, ident })
+            }
         } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
-            // possible public function or tuple struct definition where `fn`/`struct` was
-            // forgotten
+            // possible function or tuple struct definition where `fn` or `struct` was forgotten
             self.bump(); // `(`
             let is_method = self.recover_self_param();
 
             self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
 
-            let err =
-                if self.check(&token::RArrow) || self.check(&token::OpenDelim(Delimiter::Brace)) {
-                    self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
-                    self.bump(); // `{`
-                    self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
-                    if is_method {
-                        errors::MissingKeywordForItemDefinition::Method { span: sp, ident }
-                    } else {
-                        errors::MissingKeywordForItemDefinition::Function { span: sp, ident }
-                    }
-                } else if self.check(&token::Semi) {
-                    errors::MissingKeywordForItemDefinition::Struct { span: sp, ident }
+            let err = if self.check(&token::RArrow)
+                || self.check(&token::OpenDelim(Delimiter::Brace))
+            {
+                self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
+                self.bump(); // `{`
+                self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
+                if is_method {
+                    errors::MissingKeywordForItemDefinition::Method { span, insert_span, ident }
                 } else {
-                    errors::MissingKeywordForItemDefinition::Ambiguous {
-                        span: sp,
-                        subdiag: if found_generics {
-                            None
-                        } else if let Ok(snippet) = self.span_to_snippet(ident_sp) {
-                            Some(errors::AmbiguousMissingKwForItemSub::SuggestMacro {
-                                span: full_sp,
-                                snippet,
-                            })
-                        } else {
-                            Some(errors::AmbiguousMissingKwForItemSub::HelpMacro)
-                        },
-                    }
-                };
+                    errors::MissingKeywordForItemDefinition::Function { span, insert_span, ident }
+                }
+            } else if is_pub && self.check(&token::Semi) {
+                errors::MissingKeywordForItemDefinition::Struct { span, insert_span, ident }
+            } else {
+                errors::MissingKeywordForItemDefinition::Ambiguous {
+                    span,
+                    subdiag: if found_generics {
+                        None
+                    } else if let Ok(snippet) = self.span_to_snippet(ident_span) {
+                        Some(errors::AmbiguousMissingKwForItemSub::SuggestMacro {
+                            span: ident_span,
+                            snippet,
+                        })
+                    } else {
+                        Some(errors::AmbiguousMissingKwForItemSub::HelpMacro)
+                    },
+                }
+            };
             Some(err)
         } else if found_generics {
-            Some(errors::MissingKeywordForItemDefinition::Ambiguous { span: sp, subdiag: None })
+            Some(errors::MissingKeywordForItemDefinition::Ambiguous { span, subdiag: None })
         } else {
             None
         };
@@ -1228,7 +1242,7 @@ impl<'a> Parser<'a> {
                                 ident_span: ident.span,
                                 const_span,
                             });
-                            ForeignItemKind::Static(Box::new(StaticForeignItem {
+                            ForeignItemKind::Static(Box::new(StaticItem {
                                 ty,
                                 mutability: Mutability::Not,
                                 expr,
@@ -1259,7 +1273,7 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw::Unsafe)
             && self.is_keyword_ahead(1, &[kw::Extern])
             && self.look_ahead(
-                2 + self.look_ahead(2, |t| t.can_begin_literal_maybe_minus() as usize),
+                2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize),
                 |t| t.kind == token::OpenDelim(Delimiter::Brace),
             )
     }
@@ -2448,7 +2462,7 @@ impl<'a> Parser<'a> {
                 })
             // `extern ABI fn`
             || self.check_keyword_case(kw::Extern, case)
-                && self.look_ahead(1, |t| t.can_begin_literal_maybe_minus())
+                && self.look_ahead(1, |t| t.can_begin_string_literal())
                 && (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
                     // this branch is only for a better diagnostic later; `pub` is not allowed here
                     (self.may_recover()
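
The recovery above distinguishes these input shapes, each missing its keyword
(a sketch of erroneous inputs, not valid Rust):

    pub S {}              // empty braces: ambiguous, reported as EnumOrStruct
    pub S { f: u8 }       // `:` after the first field: suggest inserting `struct`
    pub S { A, B }        // no `:`: suggest inserting `enum`
    pub foo() -> u8 { 0 } // paren list then `->` or a body: suggest `fn`
                          // (or a method, if a `self` param is recovered)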
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 2f12459da57..45ca267fe5d 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -20,7 +20,7 @@ use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttrsTarget, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
@@ -153,7 +153,7 @@ pub struct Parser<'a> {
     expected_tokens: Vec<TokenType>,
     token_cursor: TokenCursor,
     // The number of calls to `bump`, i.e. the position in the token stream.
-    num_bump_calls: usize,
+    num_bump_calls: u32,
     // During parsing we may sometimes need to 'unglue' a glued token into two
     // component tokens (e.g. '>>' into '>' and '>'), so the parser can consume
     // them one at a time. This process bypasses the normal capturing mechanism
@@ -192,7 +192,7 @@ pub struct Parser<'a> {
 // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
 // it doesn't unintentionally get bigger.
 #[cfg(target_pointer_width = "64")]
-rustc_data_structures::static_assert_size!(Parser<'_>, 264);
+rustc_data_structures::static_assert_size!(Parser<'_>, 256);
 
 /// Stores span information about a closure.
 #[derive(Clone, Debug)]
@@ -203,13 +203,13 @@ struct ClosureSpans {
 }
 
 /// Indicates a range of tokens that should be replaced by
-/// the tokens in the provided vector. This is used in two
+/// the tokens in the provided `AttrsTarget`. This is used in two
 /// places during token collection:
 ///
 /// 1. During the parsing of an AST node that may have a `#[derive]`
 /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
 /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
-/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
+/// with `FlatToken::AttrsTarget`, allowing us to perform eager cfg-expansion
 /// on an `AttrTokenStream`.
 ///
 /// 2. When we parse an inner attribute while collecting tokens. We
@@ -219,7 +219,7 @@ struct ClosureSpans {
 /// the first macro inner attribute to invoke a proc-macro).
 /// When we create a `TokenStream`, the inner attributes get inserted
 /// into the proper place in the token stream.
-type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
+type ReplaceRange = (Range<u32>, Option<AttrsTarget>);
 
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
@@ -1214,6 +1214,9 @@ impl<'a> Parser<'a> {
         if self.eat_keyword_case(kw::Unsafe, case) {
             Safety::Unsafe(self.prev_token.uninterpolated_span())
         } else if self.eat_keyword_case(kw::Safe, case) {
+            self.psess
+                .gated_spans
+                .gate(sym::unsafe_extern_blocks, self.prev_token.uninterpolated_span());
             Safety::Safe(self.prev_token.uninterpolated_span())
         } else {
             Safety::Default
@@ -1569,7 +1572,7 @@ impl<'a> Parser<'a> {
         self.expected_tokens.clear();
     }
 
-    pub fn approx_token_stream_pos(&self) -> usize {
+    pub fn approx_token_stream_pos(&self) -> u32 {
         self.num_bump_calls
     }
 }
@@ -1605,11 +1608,10 @@ enum FlatToken {
     /// A token - this holds both delimiter (e.g. '{' and '}')
     /// and non-delimiter tokens
     Token(Token),
-    /// Holds the `AttributesData` for an AST node. The
-    /// `AttributesData` is inserted directly into the
-    /// constructed `AttrTokenStream` as
-    /// an `AttrTokenTree::Attributes`.
-    AttrTarget(AttributesData),
+    /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
+    /// directly into the constructed `AttrTokenStream` as an
+    /// `AttrTokenTree::AttrsTarget`.
+    AttrsTarget(AttrsTarget),
     /// A special 'empty' token that is ignored during the conversion
     /// to an `AttrTokenStream`. This is used to simplify the
     /// handling of replace ranges.
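
To make case 1 concrete (a sketch; the derive and cfg predicate are
illustrative): while the struct below is parsed, the `#[cfg_attr]` field gets
a `ReplaceRange` over its captured tokens paired with `Some(AttrsTarget)`, so
the `#[derive(Clone)]` expansion can eagerly cfg-expand the field from the
`AttrTokenStream`:

    #[derive(Clone)]
    struct Foo {
        #[cfg_attr(feature = "extra", allow(dead_code))]
        field: u8,
    }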
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index a0b704aeea5..4a78b427832 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -1,5 +1,7 @@
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, Nonterminal::*, NonterminalKind, Token};
+use rustc_ast::token::{
+    self, Delimiter, Nonterminal::*, NonterminalKind, NtExprKind::*, NtPatKind::*, Token,
+};
 use rustc_ast::HasTokens;
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
@@ -36,18 +38,17 @@ impl<'a> Parser<'a> {
         }
 
         match kind {
-            NonterminalKind::Expr2021 => {
+            NonterminalKind::Expr(Expr2021 { .. }) => {
                 token.can_begin_expr()
                 // This exception is here for backwards compatibility.
                 && !token.is_keyword(kw::Let)
                 // This exception is here for backwards compatibility.
                 && !token.is_keyword(kw::Const)
             }
-            NonterminalKind::Expr => {
+            NonterminalKind::Expr(Expr) => {
                 token.can_begin_expr()
                 // This exception is here for backwards compatibility.
                 && !token.is_keyword(kw::Let)
-                && (!token.is_keyword(kw::Const) || token.span.edition().at_least_rust_2024())
             }
             NonterminalKind::Ty => token.can_begin_type(),
             NonterminalKind::Ident => get_macro_ident(token).is_some(),
@@ -75,7 +76,7 @@ impl<'a> Parser<'a> {
                 token::Interpolated(nt) => may_be_ident(nt),
                 _ => false,
             },
-            NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => match &token.kind {
+            NonterminalKind::Pat(pat_kind) => match &token.kind {
                 // box, ref, mut, and other identifiers (can stricten)
                 token::Ident(..) | token::NtIdent(..) |
                 token::OpenDelim(Delimiter::Parenthesis) |  // tuple pattern
@@ -90,7 +91,7 @@ impl<'a> Parser<'a> {
                 token::Lt |                                 // path (UFCS constant)
                 token::BinOp(token::Shl) => true,           // path (double UFCS)
                 // leading vert `|` or-pattern
-                token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr),
+                token::BinOp(token::Or) => matches!(pat_kind, PatWithOr),
                 token::Interpolated(nt) => may_be_ident(nt),
                 _ => false,
             },
@@ -136,31 +137,25 @@ impl<'a> Parser<'a> {
                         .create_err(UnexpectedNonterminal::Statement(self.token.span)));
                 }
             },
-            NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
-                NtPat(self.collect_tokens_no_attrs(|this| match kind {
-                    NonterminalKind::PatParam { .. } => this.parse_pat_no_top_alt(None, None),
-                    NonterminalKind::PatWithOr => this.parse_pat_allow_top_alt(
+            NonterminalKind::Pat(pat_kind) => {
+                NtPat(self.collect_tokens_no_attrs(|this| match pat_kind {
+                    PatParam { .. } => this.parse_pat_no_top_alt(None, None),
+                    PatWithOr => this.parse_pat_allow_top_alt(
                         None,
                         RecoverComma::No,
                         RecoverColon::No,
                         CommaRecoveryMode::EitherTupleOrPipe,
                     ),
-                    _ => unreachable!(),
                 })?)
             }
-
-            NonterminalKind::Expr | NonterminalKind::Expr2021 => {
-                NtExpr(self.parse_expr_force_collect()?)
-            }
+            NonterminalKind::Expr(_) => NtExpr(self.parse_expr_force_collect()?),
             NonterminalKind::Literal => {
                 // The `:literal` matcher does not support attributes
                 NtLiteral(self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?)
             }
-
             NonterminalKind::Ty => {
                 NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?)
             }
-
             // this could be handled like a token, since it is one
             NonterminalKind::Ident => {
                 return if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
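
The `PatParam`/`PatWithOr` split above corresponds to the two pattern fragment
specifiers (a sketch; `$p:pat` maps to `PatWithOr` on recent editions):

    macro_rules! m {
        ($p:pat) => {};       // PatWithOr: top-level `|` alternatives allowed
        ($p:pat_param) => {}; // PatParam: no top-level `|`, as in fn params
    }

    m!(Some(_) | None); // matches the `$p:pat` (PatWithOr) arm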
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 03aea0888d9..6f2b7177159 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -939,7 +939,8 @@ impl<'a> Parser<'a> {
             || self.look_ahead(dist, |t| {
                 t.is_path_start() // e.g. `MY_CONST`;
                 || t.kind == token::Dot // e.g. `.5` for recovery;
-                || t.can_begin_literal_maybe_minus() // e.g. `42`.
+                || matches!(t.kind, token::Literal(..) | token::BinOp(token::Minus))
+                || t.is_bool_lit()
                 || t.is_whole_expr()
                 || t.is_lifetime() // recover `'a` instead of `'a'`
                 || (self.may_recover() // recover leading `(`
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index da8d1194325..03c647dd527 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -353,18 +353,17 @@ impl<'a> Parser<'a> {
                     })?;
                     let span = lo.to(self.prev_token.span);
                     AngleBracketedArgs { args, span }.into()
-                } else if self.may_recover()
-                    && self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
+                } else if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
                     // FIXME(return_type_notation): Could also recover `...` here.
                     && self.look_ahead(1, |tok| tok.kind == token::DotDot)
                 {
-                    self.bump();
-                    self.dcx()
-                        .emit_err(errors::BadReturnTypeNotationDotDot { span: self.token.span });
-                    self.bump();
+                    self.bump(); // (
+                    self.bump(); // ..
                     self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
                     let span = lo.to(self.prev_token.span);
 
+                    self.psess.gated_spans.gate(sym::return_type_notation, span);
+
                     if self.eat_noexpect(&token::RArrow) {
                         let lo = self.prev_token.span;
                         let ty = self.parse_ty()?;
@@ -372,13 +371,7 @@ impl<'a> Parser<'a> {
                             .emit_err(errors::BadReturnTypeNotationOutput { span: lo.to(ty.span) });
                     }
 
-                    ParenthesizedArgs {
-                        span,
-                        inputs: ThinVec::new(),
-                        inputs_span: span,
-                        output: ast::FnRetTy::Default(self.prev_token.span.shrink_to_hi()),
-                    }
-                    .into()
+                    P(ast::GenericArgs::ParenthesizedElided(span))
                 } else {
                     // `(T, U) -> R`
 
@@ -733,14 +726,6 @@ impl<'a> Parser<'a> {
 
                     let span = lo.to(self.prev_token.span);
 
-                    if let AssocItemConstraintKind::Bound { .. } = kind
-                        && let Some(ast::GenericArgs::Parenthesized(args)) = &gen_args
-                        && args.inputs.is_empty()
-                        && let ast::FnRetTy::Default(..) = args.output
-                    {
-                        self.psess.gated_spans.gate(sym::return_type_notation, span);
-                    }
-
                     let constraint =
                         AssocItemConstraint { id: ast::DUMMY_NODE_ID, ident, gen_args, kind, span };
                     Ok(Some(AngleBracketedArg::Constraint(constraint)))
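
The parenthesized-elided form gated here is return type notation. A sketch,
assuming a suitable trait (nightly, with the feature enabled):

    #![feature(return_type_notation)]

    trait Database {
        async fn query(&self) -> String;
    }

    // `query(..)` elides all argument types; the bound applies to the
    // method's return type (here, its future). This is the form parsed
    // above into `GenericArgs::ParenthesizedElided(span)`.
    fn run<D: Database<query(..): Send>>(_db: D) {}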
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index 3a4690670af..42392ad2163 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -322,9 +322,8 @@ error: foo
  --> test.rs:3:3
   |
 3 |      X0 Y0
-  |   ___^__-
-  |  |___|
-  | ||
+  |  ____^  -
+  | | ______|
 4 | ||   X1 Y1
 5 | ||   X2 Y2
   | ||____^__- `Y` is a good letter too
@@ -361,9 +360,8 @@ error: foo
  --> test.rs:3:3
   |
 3 |      X0 Y0
-  |   ___^__-
-  |  |___|
-  | ||
+  |  ____^  -
+  | | ______|
 4 | ||   Y1 X1
   | ||____-__^ `X` is a good letter
   |  |____|
@@ -445,10 +443,9 @@ error: foo
  --> test.rs:3:3
   |
 3 |       X0 Y0 Z0
-  |    ___^__-__-
-  |   |___|__|
-  |  ||___|
-  | |||
+  |  _____^  -  -
+  | | _______|  |
+  | || _________|
 4 | |||   X1 Y1 Z1
 5 | |||   X2 Y2 Z2
   | |||____^__-__- `Z` label
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index fcd623b477f..1e5b227aaa9 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -18,7 +18,7 @@ use rustc_ast::{
 };
 use rustc_errors::{Applicability, PResult};
 use rustc_span::symbol::{kw, sym, Ident};
-use rustc_span::{Span, Symbol};
+use rustc_span::{ErrorGuaranteed, Span, Symbol};
 use thin_vec::{thin_vec, ThinVec};
 
 #[derive(Copy, Clone, PartialEq)]
@@ -280,7 +280,7 @@ impl<'a> Parser<'a> {
             // Function pointer type or bound list (trait object type) starting with a poly-trait.
             //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
             //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
-            let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+            let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
             if self.check_fn_front_matter(false, Case::Sensitive) {
                 self.parse_ty_bare_fn(
                     lo,
@@ -608,7 +608,7 @@ impl<'a> Parser<'a> {
             self.dcx().emit_err(FnPointerCannotBeAsync { span: whole_span, qualifier: span });
         }
         // FIXME(gen_blocks): emit a similar error for `gen fn()`
-        let decl_span = span_start.to(self.token.span);
+        let decl_span = span_start.to(self.prev_token.span);
         Ok(TyKind::BareFn(P(BareFnTy { ext, safety, generic_params: params, decl, decl_span })))
     }
 
@@ -833,12 +833,9 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         let leading_token = self.prev_token.clone();
         let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
-        let inner_lo = self.token.span;
 
-        let modifiers = self.parse_trait_bound_modifiers()?;
         let bound = if self.token.is_lifetime() {
-            self.error_lt_bound_with_modifiers(modifiers);
-            self.parse_generic_lt_bound(lo, inner_lo, has_parens)?
+            self.parse_generic_lt_bound(lo, has_parens)?
         } else if self.eat_keyword(kw::Use) {
             // parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of
             // lifetimes and ident params (including SelfUpper). These are validated later
@@ -848,7 +845,7 @@ impl<'a> Parser<'a> {
             let (args, args_span) = self.parse_precise_capturing_args()?;
             GenericBound::Use(args, use_span.to(args_span))
         } else {
-            self.parse_generic_ty_bound(lo, has_parens, modifiers, &leading_token)?
+            self.parse_generic_ty_bound(lo, has_parens, &leading_token)?
         };
 
         Ok(bound)
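
After this change, `parse_generic_bound` dispatches purely on the leading tokens: a lifetime yields an outlives bound, `use` yields precise capturing, and anything else (including modifiers, which are now parsed inside `parse_generic_ty_bound`) yields a trait bound. Roughly, the three shapes involved (precise capturing may require a recent toolchain or feature gate):

    fn outlives<'a, T: 'a>(x: &'a T) {}                      // GenericBound::Outlives
    fn capture<'a>(x: &'a u8) -> impl Sized + use<'a> { x }  // GenericBound::Use
    fn trait_bound<F: for<'b> Fn(&'b u8)>(f: F) {}           // GenericBound::Trait
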
@@ -858,50 +855,64 @@ impl<'a> Parser<'a> {
     /// ```ebnf
     /// LT_BOUND = LIFETIME
     /// ```
-    fn parse_generic_lt_bound(
-        &mut self,
-        lo: Span,
-        inner_lo: Span,
-        has_parens: bool,
-    ) -> PResult<'a, GenericBound> {
-        let bound = GenericBound::Outlives(self.expect_lifetime());
+    fn parse_generic_lt_bound(&mut self, lo: Span, has_parens: bool) -> PResult<'a, GenericBound> {
+        let lt = self.expect_lifetime();
+        let bound = GenericBound::Outlives(lt);
         if has_parens {
             // FIXME(Centril): Consider not erroring here and accepting `('lt)` instead,
             // possibly introducing `GenericBound::Paren(P<GenericBound>)`?
-            self.recover_paren_lifetime(lo, inner_lo)?;
+            self.recover_paren_lifetime(lo, lt.ident.span)?;
         }
         Ok(bound)
     }
 
     /// Emits an error if any trait bound modifiers were present.
-    fn error_lt_bound_with_modifiers(&self, modifiers: TraitBoundModifiers) {
-        match modifiers.constness {
+    fn error_lt_bound_with_modifiers(
+        &self,
+        modifiers: TraitBoundModifiers,
+        binder_span: Option<Span>,
+    ) -> ErrorGuaranteed {
+        let TraitBoundModifiers { constness, asyncness, polarity } = modifiers;
+
+        match constness {
             BoundConstness::Never => {}
             BoundConstness::Always(span) | BoundConstness::Maybe(span) => {
-                self.dcx().emit_err(errors::ModifierLifetime {
-                    span,
-                    modifier: modifiers.constness.as_str(),
-                });
+                return self
+                    .dcx()
+                    .emit_err(errors::ModifierLifetime { span, modifier: constness.as_str() });
             }
         }
 
-        match modifiers.polarity {
+        match polarity {
             BoundPolarity::Positive => {}
             BoundPolarity::Negative(span) | BoundPolarity::Maybe(span) => {
-                self.dcx().emit_err(errors::ModifierLifetime {
-                    span,
-                    modifier: modifiers.polarity.as_str(),
-                });
+                return self
+                    .dcx()
+                    .emit_err(errors::ModifierLifetime { span, modifier: polarity.as_str() });
+            }
+        }
+
+        match asyncness {
+            BoundAsyncness::Normal => {}
+            BoundAsyncness::Async(span) => {
+                return self
+                    .dcx()
+                    .emit_err(errors::ModifierLifetime { span, modifier: asyncness.as_str() });
             }
         }
+
+        if let Some(span) = binder_span {
+            return self.dcx().emit_err(errors::ModifierLifetime { span, modifier: "for<...>" });
+        }
+
+        unreachable!("lifetime bound intercepted in `parse_generic_ty_bound` but no modifiers?")
     }
 
     /// Recover on `('lifetime)` with `(` already eaten.
-    fn recover_paren_lifetime(&mut self, lo: Span, inner_lo: Span) -> PResult<'a, ()> {
-        let inner_span = inner_lo.to(self.prev_token.span);
+    fn recover_paren_lifetime(&mut self, lo: Span, lt_span: Span) -> PResult<'a, ()> {
         self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
         let span = lo.to(self.prev_token.span);
-        let (sugg, snippet) = if let Ok(snippet) = self.span_to_snippet(inner_span) {
+        let (sugg, snippet) = if let Ok(snippet) = self.span_to_snippet(lt_span) {
             (Some(span), snippet)
         } else {
             (None, String::new())
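
`recover_paren_lifetime` now derives its suggestion snippet from the lifetime's own span rather than a separately tracked `inner_lo`. The recovery targets input like the following sketch, where the parser suggests dropping the parentheses (i.e. `T: 'a`):

    fn f<'a, T: ('a)>(x: T) {}  // error: parenthesized lifetime bound
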
@@ -916,7 +927,7 @@ impl<'a> Parser<'a> {
     /// If no modifiers are present, this does not consume any tokens.
     ///
     /// ```ebnf
-    /// TRAIT_BOUND_MODIFIERS = [["~"] "const"] ["?" | "!"]
+    /// TRAIT_BOUND_MODIFIERS = [["~"] "const"] ["async"] ["?" | "!"]
     /// ```
     fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> {
         let constness = if self.eat(&token::Tilde) {
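
Per the updated grammar, the modifier slots are independent and ordered: optional `[~]const`, then optional `async`, then optional polarity. A sketch of inputs the modifier parser now consumes (most are nightly-gated, e.g. `const_trait_impl` for `~const` and `async_closure` for `async` bounds):

    fn a<T: ?Sized>(x: &T) {}       // "?": maybe polarity
    fn b<T: ~const PartialEq>() {}  // "~const" constness
    fn c<F: async Fn()>(f: F) {}    // "async" bound modifier
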
@@ -970,15 +981,23 @@ impl<'a> Parser<'a> {
     /// TY_BOUND_NOPAREN = [TRAIT_BOUND_MODIFIERS] [for<LT_PARAM_DEFS>] SIMPLE_PATH
     /// ```
     ///
-    /// For example, this grammar accepts `~const ?for<'a: 'b> m::Trait<'a>`.
+    /// For example, this grammar accepts `~const async ?for<'a: 'b> m::Trait<'a>`.
     fn parse_generic_ty_bound(
         &mut self,
         lo: Span,
         has_parens: bool,
-        modifiers: TraitBoundModifiers,
         leading_token: &Token,
     ) -> PResult<'a, GenericBound> {
-        let mut lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+        let modifiers = self.parse_trait_bound_modifiers()?;
+        let (mut lifetime_defs, binder_span) = self.parse_late_bound_lifetime_defs()?;
+
+        // Recover from an erroneous lifetime bound that is preceded by modifiers
+        // or a binder, e.g. `T: for<'a> 'a` or `T: ~const 'a`.
+        if self.token.is_lifetime() {
+            let _: ErrorGuaranteed = self.error_lt_bound_with_modifiers(modifiers, binder_span);
+            return self.parse_generic_lt_bound(lo, has_parens);
+        }
+
         let mut path = if self.token.is_keyword(kw::Fn)
             && self.look_ahead(1, |tok| tok.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
             && let Some(path) = self.recover_path_from_fn()
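
The recovery above fires when modifiers or a `for<...>` binder precede a plain lifetime: the error is emitted exactly once (hence the `ErrorGuaranteed` binding), and parsing continues as if an ordinary lifetime bound had been written. The two inputs named in the comment:

    fn f<'a, T: for<'a> 'a>() {}   // binder before a lifetime bound
    fn g<'a, T: ~const 'a>() {}    // modifier before a lifetime bound
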
@@ -1094,16 +1113,19 @@ impl<'a> Parser<'a> {
     }
 
     /// Optionally parses `for<$generic_params>`.
-    pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, ThinVec<GenericParam>> {
+    pub(super) fn parse_late_bound_lifetime_defs(
+        &mut self,
+    ) -> PResult<'a, (ThinVec<GenericParam>, Option<Span>)> {
         if self.eat_keyword(kw::For) {
+            let lo = self.token.span;
             self.expect_lt()?;
             let params = self.parse_generic_params()?;
             self.expect_gt()?;
-            // We rely on AST validation to rule out invalid cases: There must not be type
-            // parameters, and the lifetime parameters must not have bounds.
-            Ok(params)
+            // We rely on AST validation to rule out invalid cases: There must not be
+            // type or const parameters, and parameters must not have bounds.
+            Ok((params, Some(lo.to(self.prev_token.span))))
         } else {
-            Ok(ThinVec::new())
+            Ok((ThinVec::new(), None))
         }
     }
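
Returning the binder span lets the caller point its "modifier on lifetime bound" diagnostic at the whole `for<...>`. As the updated comment notes, the parser itself stays permissive here; a sketch of input that parses but is rejected by AST validation afterwards:

    // Type/const parameters and bounds inside a binder parse here, then
    // error during AST validation:
    fn f<T>() where for<const N: usize> T: Sized {}
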
 
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index 4ca52146039..3d5e6371f4c 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -4,22 +4,76 @@ use crate::{errors, parse_in};
 
 use rustc_ast::token::Delimiter;
 use rustc_ast::tokenstream::DelimSpan;
-use rustc_ast::MetaItemKind;
-use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, Attribute, DelimArgs, MetaItem};
+use rustc_ast::{
+    self as ast, AttrArgs, AttrArgsEq, Attribute, DelimArgs, MetaItem, MetaItemKind,
+    NestedMetaItem, Safety,
+};
 use rustc_errors::{Applicability, FatalError, PResult};
-use rustc_feature::{AttributeTemplate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
+use rustc_feature::{
+    AttributeSafety, AttributeTemplate, BuiltinAttribute, Features, BUILTIN_ATTRIBUTE_MAP,
+};
 use rustc_session::errors::report_lit_error;
-use rustc_session::lint::builtin::ILL_FORMED_ATTRIBUTE_INPUT;
+use rustc_session::lint::builtin::{ILL_FORMED_ATTRIBUTE_INPUT, UNSAFE_ATTR_OUTSIDE_UNSAFE};
 use rustc_session::lint::BuiltinLintDiag;
 use rustc_session::parse::ParseSess;
-use rustc_span::{sym, Span, Symbol};
+use rustc_span::{sym, BytePos, Span, Symbol};
 
-pub fn check_attr(psess: &ParseSess, attr: &Attribute) {
+pub fn check_attr(features: &Features, psess: &ParseSess, attr: &Attribute) {
     if attr.is_doc_comment() {
         return;
     }
 
     let attr_info = attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name));
+    let attr_item = attr.get_normal_item();
+
+    let is_unsafe_attr = attr_info.is_some_and(|attr| attr.safety == AttributeSafety::Unsafe);
+
+    if features.unsafe_attributes {
+        if is_unsafe_attr {
+            if let ast::Safety::Default = attr_item.unsafety {
+                let path_span = attr_item.path.span;
+
+                // If the `attr_item`'s span can be used for suggestions (i.e. it
+                // does not come from a macro), suggest wrapping the whole item in
+                // `unsafe(...)`. Otherwise, suggest inserting `unsafe(` right
+                // after the opening `#[` and `)` right before the closing `]`.
+                let diag_span = if attr_item.span().can_be_used_for_suggestions() {
+                    attr_item.span()
+                } else {
+                    attr.span
+                        .with_lo(attr.span.lo() + BytePos(2))
+                        .with_hi(attr.span.hi() - BytePos(1))
+                };
+
+                if attr.span.at_least_rust_2024() {
+                    psess.dcx().emit_err(errors::UnsafeAttrOutsideUnsafe {
+                        span: path_span,
+                        suggestion: errors::UnsafeAttrOutsideUnsafeSuggestion {
+                            left: diag_span.shrink_to_lo(),
+                            right: diag_span.shrink_to_hi(),
+                        },
+                    });
+                } else {
+                    psess.buffer_lint(
+                        UNSAFE_ATTR_OUTSIDE_UNSAFE,
+                        path_span,
+                        ast::CRATE_NODE_ID,
+                        BuiltinLintDiag::UnsafeAttrOutsideUnsafe {
+                            attribute_name_span: path_span,
+                            sugg_spans: (diag_span.shrink_to_lo(), diag_span.shrink_to_hi()),
+                        },
+                    );
+                }
+            }
+        } else if let Safety::Unsafe(unsafe_span) = attr_item.unsafety {
+            psess.dcx().emit_err(errors::InvalidAttrUnsafe {
+                span: unsafe_span,
+                name: attr_item.path.clone(),
+            });
+        }
+    }
 
     // Check input tokens for built-in and key-value attributes.
     match attr_info {
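
Concretely, once `unsafe_attributes` is enabled, the checks above enforce the `unsafe(...)` wrapper for attributes marked `AttributeSafety::Unsafe`: omitting it is a hard error in Rust 2024 and an `UNSAFE_ATTR_OUTSIDE_UNSAFE` lint in earlier editions, while the wrapper on a safe attribute is always an error. Assuming `no_mangle` is among the unsafe-marked built-ins:

    #[unsafe(no_mangle)]          // required spelling in Rust 2024
    pub extern "C" fn entry() {}

    #[no_mangle]                  // 2024: UnsafeAttrOutsideUnsafe error;
    pub extern "C" fn legacy() {} // earlier editions: buffered lint

    // #[unsafe(inline)]          // always an error: InvalidAttrUnsafe
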
@@ -32,7 +86,7 @@ pub fn check_attr(psess: &ParseSess, attr: &Attribute) {
                 }
             }
         }
-        _ if let AttrArgs::Eq(..) = attr.get_normal_item().args => {
+        _ if let AttrArgs::Eq(..) = attr_item.args => {
             // All key-value attributes are restricted to meta-item syntax.
             match parse_meta(psess, attr) {
                 Ok(_) => {}
@@ -132,9 +186,13 @@ pub(super) fn check_cfg_attr_bad_delim(psess: &ParseSess, span: DelimSpan, delim
 
 /// Checks that the given meta-item is compatible with this `AttributeTemplate`.
 fn is_attr_template_compatible(template: &AttributeTemplate, meta: &ast::MetaItemKind) -> bool {
+    let is_one_allowed_subword = |items: &[NestedMetaItem]| match items {
+        [item] => item.is_word() && template.one_of.iter().any(|&word| item.has_name(word)),
+        _ => false,
+    };
     match meta {
         MetaItemKind::Word => template.word,
-        MetaItemKind::List(..) => template.list.is_some(),
+        MetaItemKind::List(items) => template.list.is_some() || is_one_allowed_subword(items),
         MetaItemKind::NameValue(lit) if lit.kind.is_str() => template.name_value_str.is_some(),
         MetaItemKind::NameValue(..) => false,
     }
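
The new closure accepts a list containing exactly one word drawn from the template's `one_of` allow-list, even when the template does not otherwise accept list syntax. Assuming a template whose `one_of` is `on`/`off` (as with an attribute like `#[coverage]`):

    #[coverage(off)]   // accepted: exactly one word from the allow-list
    fn untested() {}

    // #[coverage(off, on)]  rejected: more than one item
    // #[coverage(flag)]     rejected: word not in `one_of`
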
@@ -178,6 +236,7 @@ fn emit_malformed_attribute(
     if let Some(descr) = template.list {
         suggestions.push(format!("#{inner}[{name}({descr})]"));
     }
+    suggestions.extend(template.one_of.iter().map(|&word| format!("#{inner}[{name}({word})]")));
     if let Some(descr) = template.name_value_str {
         suggestions.push(format!("#{inner}[{name} = \"{descr}\"]"));
     }
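
With the `suggestions.extend(...)` line above, the malformed-attribute help also lists each `one_of` word as its own well-formed alternative; for the hypothetical template sketched earlier, the pushed strings would be `#[coverage(on)]` and `#[coverage(off)]`, alongside any `list` or `name_value_str` forms.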