Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs               7
-rw-r--r--  compiler/rustc_parse/src/lexer/diagnostics.rs   28
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs            7
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs    45
-rw-r--r--  compiler/rustc_parse/src/lib.rs                  2
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs        109
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs         11
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs          26
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs         20
9 files changed, 148 insertions, 107 deletions
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 44b4e1a3e47..35cf4c1b00d 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -2141,6 +2141,13 @@ pub(crate) enum UnknownPrefixSugg {
     )]
     UseBr(#[primary_span] Span),
     #[suggestion(
+        parse_suggestion_cr,
+        code = "cr",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    UseCr(#[primary_span] Span),
+    #[suggestion(
         parse_suggestion_whitespace,
         code = " ",
         applicability = "maybe-incorrect",
diff --git a/compiler/rustc_parse/src/lexer/diagnostics.rs b/compiler/rustc_parse/src/lexer/diagnostics.rs
index e1f19beb53a..0b97d4e6993 100644
--- a/compiler/rustc_parse/src/lexer/diagnostics.rs
+++ b/compiler/rustc_parse/src/lexer/diagnostics.rs
@@ -1,14 +1,17 @@
 use rustc_ast::token::Delimiter;
 use rustc_errors::Diag;
+use rustc_session::parse::ParseSess;
 use rustc_span::Span;
 use rustc_span::source_map::SourceMap;
 
 use super::UnmatchedDelim;
+use crate::errors::MismatchedClosingDelimiter;
+use crate::pprust;
 
 #[derive(Default)]
 pub(super) struct TokenTreeDiagInfo {
     /// Stack of open delimiters and their spans. Used for error message.
-    pub open_braces: Vec<(Delimiter, Span)>,
+    pub open_delimiters: Vec<(Delimiter, Span)>,
     pub unmatched_delims: Vec<UnmatchedDelim>,
 
     /// Used only for error recovery when arriving to EOF with mismatched braces.
@@ -108,7 +111,7 @@ pub(super) fn report_suspicious_mismatch_block(
     } else {
         // If there is no suspicious span, pointing at the last properly closed block may help
         if let Some(parent) = diag_info.matching_block_spans.last()
-            && diag_info.open_braces.last().is_none()
+            && diag_info.open_delimiters.last().is_none()
             && diag_info.empty_block_spans.iter().all(|&sp| sp != parent.0.to(parent.1))
         {
             err.span_label(parent.0, "this opening brace...");
@@ -116,3 +119,24 @@ pub(super) fn report_suspicious_mismatch_block(
         }
     }
 }
+
+pub(crate) fn make_unclosed_delims_error(
+    unmatched: UnmatchedDelim,
+    psess: &ParseSess,
+) -> Option<Diag<'_>> {
+    // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
+    // `unmatched_delims` only for error recovery in the `Parser`.
+    let found_delim = unmatched.found_delim?;
+    let mut spans = vec![unmatched.found_span];
+    if let Some(sp) = unmatched.unclosed_span {
+        spans.push(sp);
+    };
+    let err = psess.dcx().create_err(MismatchedClosingDelimiter {
+        spans,
+        delimiter: pprust::token_kind_to_string(&found_delim.as_close_token_kind()).to_string(),
+        unmatched: unmatched.found_span,
+        opening_candidate: unmatched.candidate_span,
+        unclosed: unmatched.unclosed_span,
+    });
+    Some(err)
+}
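
For context, `make_unclosed_delims_error` (moved here from `parser/mod.rs`, see further below) is meant to be driven once token-tree lexing has finished. The actual call site is not part of this diff, so the caller name `report_unmatched_delims` below is purely illustrative:

    // Illustrative sketch only (this caller is not part of the diff): drain the
    // `UnmatchedDelim`s collected during lexing and report each one.
    fn report_unmatched_delims(unmatched_delims: Vec<UnmatchedDelim>, psess: &ParseSess) {
        for unmatched in unmatched_delims {
            // `make_unclosed_delims_error` returns `None` for the EOF case,
            // which is already reported elsewhere.
            if let Some(err) = make_unclosed_delims_error(unmatched, psess) {
                err.emit();
            }
        }
    }
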
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index a8ec9a1e952..e8a5cae54cf 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -1,5 +1,6 @@
 use std::ops::Range;
 
+use diagnostics::make_unclosed_delims_error;
 use rustc_ast::ast::{self, AttrStyle};
 use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
 use rustc_ast::tokenstream::TokenStream;
@@ -17,9 +18,9 @@ use rustc_session::parse::ParseSess;
 use rustc_span::{BytePos, Pos, Span, Symbol};
 use tracing::debug;
 
+use crate::errors;
 use crate::lexer::diagnostics::TokenTreeDiagInfo;
 use crate::lexer::unicode_chars::UNICODE_ARRAY;
-use crate::{errors, make_unclosed_delims_error};
 
 mod diagnostics;
 mod tokentrees;
@@ -256,7 +257,6 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                     let lit_start = start + BytePos(prefix_len);
                     self.pos = lit_start;
                     self.cursor = Cursor::new(&str_before[prefix_len as usize..]);
-
                     self.report_unknown_prefix(start);
                     let prefix_span = self.mk_sp(start, lit_start);
                     return (Token::new(self.ident(start), prefix_span), preceded_by_whitespace);
@@ -789,13 +789,14 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
     fn report_unknown_prefix(&self, start: BytePos) {
         let prefix_span = self.mk_sp(start, self.pos);
         let prefix = self.str_from_to(start, self.pos);
-
         let expn_data = prefix_span.ctxt().outer_expn_data();
 
         if expn_data.edition.at_least_rust_2021() {
             // In Rust 2021, this is a hard error.
             let sugg = if prefix == "rb" {
                 Some(errors::UnknownPrefixSugg::UseBr(prefix_span))
+            } else if prefix == "rc" {
+                Some(errors::UnknownPrefixSugg::UseCr(prefix_span))
             } else if expn_data.is_root() {
                 if self.cursor.first() == '\''
                     && let Some(start) = self.last_lifetime
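
Taken together with the new `UnknownPrefixSugg::UseCr` variant in `errors.rs`, the user-visible effect on edition 2021 and later is roughly the following; the help wording is paraphrased from the suggestion code, not quoted from the fluent message:

    fn main() {
        let s = rc"bytes";
        //      ^^ error: prefix `rc` is unknown (a hard error on edition 2021 and later)
        //         help: use `cr` for a raw C string literal: `cr"bytes"`
    }
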
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index 0ddd9a85df8..fbea958dcc5 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -54,8 +54,8 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         let mut err = self.dcx().struct_span_err(self.token.span, msg);
 
         let unclosed_delimiter_show_limit = 5;
-        let len = usize::min(unclosed_delimiter_show_limit, self.diag_info.open_braces.len());
-        for &(_, span) in &self.diag_info.open_braces[..len] {
+        let len = usize::min(unclosed_delimiter_show_limit, self.diag_info.open_delimiters.len());
+        for &(_, span) in &self.diag_info.open_delimiters[..len] {
             err.span_label(span, "unclosed delimiter");
             self.diag_info.unmatched_delims.push(UnmatchedDelim {
                 found_delim: None,
@@ -65,19 +65,19 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
             });
         }
 
-        if let Some((_, span)) = self.diag_info.open_braces.get(unclosed_delimiter_show_limit)
-            && self.diag_info.open_braces.len() >= unclosed_delimiter_show_limit + 2
+        if let Some((_, span)) = self.diag_info.open_delimiters.get(unclosed_delimiter_show_limit)
+            && self.diag_info.open_delimiters.len() >= unclosed_delimiter_show_limit + 2
         {
             err.span_label(
                 *span,
                 format!(
                     "another {} unclosed delimiters begin from here",
-                    self.diag_info.open_braces.len() - unclosed_delimiter_show_limit
+                    self.diag_info.open_delimiters.len() - unclosed_delimiter_show_limit
                 ),
             );
         }
 
-        if let Some((delim, _)) = self.diag_info.open_braces.last() {
+        if let Some((delim, _)) = self.diag_info.open_delimiters.last() {
             report_suspicious_mismatch_block(
                 &mut err,
                 &self.diag_info,
@@ -95,7 +95,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         // The span for beginning of the delimited section.
         let pre_span = self.token.span;
 
-        self.diag_info.open_braces.push((open_delim, self.token.span));
+        self.diag_info.open_delimiters.push((open_delim, self.token.span));
 
         // Lex the token trees within the delimiters.
         // We stop at any delimiter so we can try to recover if the user
@@ -109,11 +109,12 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         let close_spacing = if let Some(close_delim) = self.token.kind.close_delim() {
             if close_delim == open_delim {
                 // Correct delimiter.
-                let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
-                let close_brace_span = self.token.span;
+                let (open_delimiter, open_delimiter_span) =
+                    self.diag_info.open_delimiters.pop().unwrap();
+                let close_delimiter_span = self.token.span;
 
                 if tts.is_empty() && close_delim == Delimiter::Brace {
-                    let empty_block_span = open_brace_span.to(close_brace_span);
+                    let empty_block_span = open_delimiter_span.to(close_delimiter_span);
                     if !sm.is_multiline(empty_block_span) {
                         // Only track if the block is in the form of `{}`, otherwise it is
                         // likely that it was written on purpose.
@@ -122,9 +123,11 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                 }
 
                 // only add braces
-                if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, open_delim) {
+                if let (Delimiter::Brace, Delimiter::Brace) = (open_delimiter, open_delim) {
                     // Add all the matching spans, we will sort by span later
-                    self.diag_info.matching_block_spans.push((open_brace_span, close_brace_span));
+                    self.diag_info
+                        .matching_block_spans
+                        .push((open_delimiter_span, close_delimiter_span));
                 }
 
                 // Move past the closing delimiter.
@@ -140,18 +143,18 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                     // This is a conservative error: only report the last unclosed
                     // delimiter. The previous unclosed delimiters could actually be
                     // closed! The lexer just hasn't gotten to them yet.
-                    if let Some(&(_, sp)) = self.diag_info.open_braces.last() {
+                    if let Some(&(_, sp)) = self.diag_info.open_delimiters.last() {
                         unclosed_delimiter = Some(sp);
                     };
-                    for (brace, brace_span) in &self.diag_info.open_braces {
-                        if same_indentation_level(sm, self.token.span, *brace_span)
-                            && brace == &close_delim
+                    for (delimiter, delimiter_span) in &self.diag_info.open_delimiters {
+                        if same_indentation_level(sm, self.token.span, *delimiter_span)
+                            && delimiter == &close_delim
                         {
                             // high likelihood of these two corresponding
-                            candidate = Some(*brace_span);
+                            candidate = Some(*delimiter_span);
                         }
                     }
-                    let (_, _) = self.diag_info.open_braces.pop().unwrap();
+                    let (_, _) = self.diag_info.open_delimiters.pop().unwrap();
                     self.diag_info.unmatched_delims.push(UnmatchedDelim {
                         found_delim: Some(close_delim),
                         found_span: self.token.span,
@@ -159,7 +162,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                         candidate_span: candidate,
                     });
                 } else {
-                    self.diag_info.open_braces.pop();
+                    self.diag_info.open_delimiters.pop();
                 }
 
                 // If the incorrect delimiter matches an earlier opening
@@ -169,7 +172,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                 // fn foo() {
                 //     bar(baz(
                 // }  // Incorrect delimiter but matches the earlier `{`
-                if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) {
+                if !self.diag_info.open_delimiters.iter().any(|&(d, _)| d == close_delim) {
                     self.bump_minimal()
                 } else {
                     // The choice of value here doesn't matter.
@@ -180,7 +183,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
             assert_eq!(self.token.kind, token::Eof);
             // Silently recover, the EOF token will be seen again
             // and an error emitted then. Thus we don't pop from
-            // self.open_braces here. The choice of spacing value here
+            // self.open_delimiters here. The choice of spacing value here
             // doesn't matter.
             Spacing::Alone
         };
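
This file is a mechanical rename of `open_braces` to `open_delimiters`; the diagnostics themselves are unchanged. As a reminder of what they look like, an input with more than `unclosed_delimiter_show_limit + 1` delimiters still open at EOF is labelled as sketched below (intentionally not valid Rust):

    // Intentionally malformed input: seven delimiters are still open at EOF.
    // The first five each get an "unclosed delimiter" label; the sixth gets
    // "another 2 unclosed delimiters begin from here".
    fn broken() { ( [ { ( [ {
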
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 2edc8c83017..896e348a12d 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -32,7 +32,7 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
 
 #[macro_use]
 pub mod parser;
-use parser::{Parser, make_unclosed_delims_error};
+use parser::Parser;
 pub mod lexer;
 pub mod validate_attr;
 
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 11e60175a1f..370eb3f402d 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -26,6 +26,7 @@ use rustc_macros::Subdiagnostic;
 use rustc_session::errors::{ExprParenthesesNeeded, report_lit_error};
 use rustc_session::lint::BuiltinLintDiag;
 use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
+use rustc_span::edition::Edition;
 use rustc_span::source_map::{self, Spanned};
 use rustc_span::{BytePos, ErrorGuaranteed, Ident, Pos, Span, Symbol, kw, sym};
 use thin_vec::{ThinVec, thin_vec};
@@ -2145,6 +2146,17 @@ impl<'a> Parser<'a> {
     /// Keep this in sync with `Token::can_begin_literal_maybe_minus` and
     /// `Lit::from_token` (excluding unary negation).
     fn eat_token_lit(&mut self) -> Option<token::Lit> {
+        let check_expr = |expr: P<Expr>| {
+            if let ast::ExprKind::Lit(token_lit) = expr.kind {
+                Some(token_lit)
+            } else if let ast::ExprKind::Unary(UnOp::Neg, inner) = &expr.kind
+                && let ast::Expr { kind: ast::ExprKind::Lit(_), .. } = **inner
+            {
+                None
+            } else {
+                panic!("unexpected reparsed expr/literal: {:?}", expr.kind);
+            }
+        };
         match self.token.uninterpolate().kind {
             token::Ident(name, IdentIsRaw::No) if name.is_bool_lit() => {
                 self.bump();
@@ -2158,10 +2170,7 @@ impl<'a> Parser<'a> {
                 let lit = self
                     .eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
                     .expect("metavar seq literal");
-                let ast::ExprKind::Lit(token_lit) = lit.kind else {
-                    panic!("didn't reparse a literal");
-                };
-                Some(token_lit)
+                check_expr(lit)
             }
             token::OpenInvisible(InvisibleOrigin::MetaVar(
                 mv_kind @ MetaVarKind::Expr { can_begin_literal_maybe_minus: true, .. },
@@ -2169,15 +2178,7 @@ impl<'a> Parser<'a> {
                 let expr = self
                     .eat_metavar_seq(mv_kind, |this| this.parse_expr())
                     .expect("metavar seq expr");
-                if let ast::ExprKind::Lit(token_lit) = expr.kind {
-                    Some(token_lit)
-                } else if let ast::ExprKind::Unary(UnOp::Neg, inner) = &expr.kind
-                    && let ast::Expr { kind: ast::ExprKind::Lit(_), .. } = **inner
-                {
-                    None
-                } else {
-                    panic!("unexpected reparsed expr: {:?}", expr.kind);
-                }
+                check_expr(expr)
             }
             _ => None,
         }
@@ -2602,7 +2603,10 @@ impl<'a> Parser<'a> {
     /// Parses an `if` expression (`if` token already eaten).
     fn parse_expr_if(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
-        let cond = self.parse_expr_cond()?;
+        // Scoping code checks the top level edition of the `if`; let's match it here.
+        // The `CondChecker` also checks the edition of the `let` itself, just to make sure.
+        let let_chains_policy = LetChainsPolicy::EditionDependent { current_edition: lo.edition() };
+        let cond = self.parse_expr_cond(let_chains_policy)?;
         self.parse_if_after_cond(lo, cond)
     }
 
@@ -2711,18 +2715,17 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the condition of a `if` or `while` expression.
+    ///
+    /// The specified `edition` in `let_chains_policy` should be that of the whole `if` construct,
+    /// i.e. the same span we use to later decide whether the drop behaviour should be that of
+    /// edition `..=2021` or that of `2024..`.
     // Public because it is used in rustfmt forks such as https://github.com/tucant/rustfmt/blob/30c83df9e1db10007bdd16dafce8a86b404329b2/src/parse/macros/html.rs#L57 for custom if expressions.
-    pub fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
+    pub fn parse_expr_cond(&mut self, let_chains_policy: LetChainsPolicy) -> PResult<'a, P<Expr>> {
         let attrs = self.parse_outer_attributes()?;
         let (mut cond, _) =
             self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
 
-        CondChecker::new(self).visit_expr(&mut cond);
-
-        if let ExprKind::Let(_, _, _, Recovered::No) = cond.kind {
-            // Remove the last feature gating of a `let` expression since it's stable.
-            self.psess.gated_spans.ungate_last(sym::let_chains, cond.span);
-        }
+        CondChecker::new(self, let_chains_policy).visit_expr(&mut cond);
 
         Ok(cond)
     }
@@ -3017,7 +3020,8 @@ impl<'a> Parser<'a> {
 
     /// Parses a `while` or `while let` expression (`while` token already eaten).
     fn parse_expr_while(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
-        let cond = self.parse_expr_cond().map_err(|mut err| {
+        let policy = LetChainsPolicy::EditionDependent { current_edition: lo.edition() };
+        let cond = self.parse_expr_cond(policy).map_err(|mut err| {
             err.span_label(lo, "while parsing the condition of this `while` expression");
             err
         })?;
@@ -3401,17 +3405,17 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_match_arm_guard(&mut self) -> PResult<'a, Option<P<Expr>>> {
-        // Used to check the `let_chains` and `if_let_guard` features mostly by scanning
+        // Used to check the `if_let_guard` feature mostly by scanning
         // `&&` tokens.
-        fn check_let_expr(expr: &Expr) -> (bool, bool) {
+        fn has_let_expr(expr: &Expr) -> bool {
             match &expr.kind {
                 ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
-                    let lhs_rslt = check_let_expr(lhs);
-                    let rhs_rslt = check_let_expr(rhs);
-                    (lhs_rslt.0 || rhs_rslt.0, false)
+                    let lhs_rslt = has_let_expr(lhs);
+                    let rhs_rslt = has_let_expr(rhs);
+                    lhs_rslt || rhs_rslt
                 }
-                ExprKind::Let(..) => (true, true),
-                _ => (false, true),
+                ExprKind::Let(..) => true,
+                _ => false,
             }
         }
         if !self.eat_keyword(exp!(If)) {
@@ -3422,14 +3426,9 @@ impl<'a> Parser<'a> {
         let if_span = self.prev_token.span;
         let mut cond = self.parse_match_guard_condition()?;
 
-        CondChecker::new(self).visit_expr(&mut cond);
+        CondChecker::new(self, LetChainsPolicy::AlwaysAllowed).visit_expr(&mut cond);
 
-        let (has_let_expr, does_not_have_bin_op) = check_let_expr(&cond);
-        if has_let_expr {
-            if does_not_have_bin_op {
-                // Remove the last feature gating of a `let` expression since it's stable.
-                self.psess.gated_spans.ungate_last(sym::let_chains, cond.span);
-            }
+        if has_let_expr(&cond) {
             let span = if_span.to(cond.span);
             self.psess.gated_spans.gate(sym::if_let_guard, span);
         }
@@ -3456,7 +3455,7 @@ impl<'a> Parser<'a> {
                     unreachable!()
                 };
                 self.psess.gated_spans.ungate_last(sym::guard_patterns, cond.span);
-                CondChecker::new(self).visit_expr(&mut cond);
+                CondChecker::new(self, LetChainsPolicy::AlwaysAllowed).visit_expr(&mut cond);
                 let right = self.prev_token.span;
                 self.dcx().emit_err(errors::ParenthesesInMatchPat {
                     span: vec![left, right],
@@ -4027,7 +4026,14 @@ pub(crate) enum ForbiddenLetReason {
     NotSupportedParentheses(#[primary_span] Span),
 }
 
-/// Visitor to check for invalid/unstable use of `ExprKind::Let` that can't
+/// Whether let chains are allowed on all editions, or it's edition dependent (allowed only on
+/// 2024 and later). In case of edition dependence, specify the currently present edition.
+pub enum LetChainsPolicy {
+    AlwaysAllowed,
+    EditionDependent { current_edition: Edition },
+}
+
+/// Visitor to check for invalid use of `ExprKind::Let` that can't
 /// easily be caught in parsing. For example:
 ///
 /// ```rust,ignore (example)
@@ -4038,19 +4044,29 @@ pub(crate) enum ForbiddenLetReason {
 /// ```
 struct CondChecker<'a> {
     parser: &'a Parser<'a>,
+    let_chains_policy: LetChainsPolicy,
+    depth: u32,
     forbid_let_reason: Option<ForbiddenLetReason>,
     missing_let: Option<errors::MaybeMissingLet>,
     comparison: Option<errors::MaybeComparison>,
 }
 
 impl<'a> CondChecker<'a> {
-    fn new(parser: &'a Parser<'a>) -> Self {
-        CondChecker { parser, forbid_let_reason: None, missing_let: None, comparison: None }
+    fn new(parser: &'a Parser<'a>, let_chains_policy: LetChainsPolicy) -> Self {
+        CondChecker {
+            parser,
+            forbid_let_reason: None,
+            missing_let: None,
+            comparison: None,
+            let_chains_policy,
+            depth: 0,
+        }
     }
 }
 
 impl MutVisitor for CondChecker<'_> {
     fn visit_expr(&mut self, e: &mut P<Expr>) {
+        self.depth += 1;
         use ForbiddenLetReason::*;
 
         let span = e.span;
@@ -4065,8 +4081,16 @@ impl MutVisitor for CondChecker<'_> {
                             comparison: self.comparison,
                         },
                     ));
-                } else {
-                    self.parser.psess.gated_spans.gate(sym::let_chains, span);
+                } else if self.depth > 1 {
+                    // Top level `let` is always allowed; only gate chains
+                    match self.let_chains_policy {
+                        LetChainsPolicy::AlwaysAllowed => (),
+                        LetChainsPolicy::EditionDependent { current_edition } => {
+                            if !current_edition.at_least_rust_2024() || !span.at_least_rust_2024() {
+                                self.parser.psess.gated_spans.gate(sym::let_chains, span);
+                            }
+                        }
+                    }
                 }
             }
             ExprKind::Binary(Spanned { node: BinOpKind::And, .. }, _, _) => {
@@ -4168,5 +4192,6 @@ impl MutVisitor for CondChecker<'_> {
                 // These would forbid any let expressions they contain already.
             }
         }
+        self.depth -= 1;
     }
 }
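
The net effect of `LetChainsPolicy`: a single top-level `let` in a condition is always accepted, match guards use `AlwaysAllowed`, and chained `let`s in `if`/`while` conditions are feature-gated only when the enclosing construct's edition is before 2024. An illustrative snippet (the function `sum_both` is not a test from this diff):

    // Accepted without the `let_chains` feature gate when the enclosing `if`
    // is edition 2024 or later; on earlier editions the chained `let` below
    // is still gated.
    fn sum_both(a: Option<i32>, b: Option<i32>) -> i32 {
        if let Some(x) = a && let Some(y) = b { x + y } else { 0 }
    }
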
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 39251f1ce27..4be8a90368d 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -2058,6 +2058,17 @@ impl<'a> Parser<'a> {
         }
         self.expect_field_ty_separator()?;
         let ty = self.parse_ty()?;
+        if self.token == token::Colon && self.look_ahead(1, |&t| t != token::Colon) {
+            self.dcx()
+                .struct_span_err(self.token.span, "found single colon in a struct field type path")
+                .with_span_suggestion_verbose(
+                    self.token.span,
+                    "write a path separator here",
+                    "::",
+                    Applicability::MaybeIncorrect,
+                )
+                .emit();
+        }
         let default = if self.token == token::Eq {
             self.bump();
             let const_expr = self.parse_expr_anon_const()?;
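
An input that exercises the new struct-field check (intentionally rejected; the message and suggestion text come from the code above, the exact rendering may differ):

    struct Foo {
        // error: found single colon in a struct field type path
        // help:  write a path separator here: `::`
        x: std::string:String,
    }
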
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index d73adb39826..48df8b59d55 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -43,11 +43,8 @@ use token_type::TokenTypeSet;
 pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
 use tracing::debug;
 
-use crate::errors::{
-    self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
-};
+use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
 use crate::exp;
-use crate::lexer::UnmatchedDelim;
 
 #[cfg(test)]
 mod tests;
@@ -1745,27 +1742,6 @@ impl<'a> Parser<'a> {
     }
 }
 
-pub(crate) fn make_unclosed_delims_error(
-    unmatched: UnmatchedDelim,
-    psess: &ParseSess,
-) -> Option<Diag<'_>> {
-    // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
-    // `unmatched_delims` only for error recovery in the `Parser`.
-    let found_delim = unmatched.found_delim?;
-    let mut spans = vec![unmatched.found_span];
-    if let Some(sp) = unmatched.unclosed_span {
-        spans.push(sp);
-    };
-    let err = psess.dcx().create_err(MismatchedClosingDelimiter {
-        spans,
-        delimiter: pprust::token_kind_to_string(&found_delim.as_close_token_kind()).to_string(),
-        unmatched: unmatched.found_span,
-        opening_candidate: unmatched.candidate_span,
-        unclosed: unmatched.unclosed_span,
-    });
-    Some(err)
-}
-
 /// A helper struct used when building an `AttrTokenStream` from
 /// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
 /// are stored as `FlatToken::Token`. A vector of `FlatToken`s
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 1a02d45f0e3..1093e4f4af0 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -248,19 +248,13 @@ impl<'a> Parser<'a> {
             segments.push(segment);
 
             if self.is_import_coupler() || !self.eat_path_sep() {
-                let ok_for_recovery = self.may_recover()
-                    && match style {
-                        PathStyle::Expr => true,
-                        PathStyle::Type if let Some((ident, _)) = self.prev_token.ident() => {
-                            self.token == token::Colon
-                                && ident.as_str().chars().all(|c| c.is_lowercase())
-                                && self.token.span.lo() == self.prev_token.span.hi()
-                                && self
-                                    .look_ahead(1, |token| self.token.span.hi() == token.span.lo())
-                        }
-                        _ => false,
-                    };
-                if ok_for_recovery
+                // IMPORTANT: We can *only ever* treat single colons as typo'ed double colons in
+                // expression contexts (!) since only there paths cannot possibly be followed by
+                // a colon and still form a syntactically valid construct. In pattern contexts,
+                // a path may be followed by a type annotation. E.g., `let pat:ty`. In type
+                // contexts, a path may be followed by a list of bounds. E.g., `where ty:bound`.
+                if self.may_recover()
+                    && style == PathStyle::Expr // (!)
                     && self.token == token::Colon
                     && self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
                 {
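
In other words, the single-colon-to-`::` recovery now fires only in expression position, where a path can never legitimately be followed by a colon; in pattern and type position the colon may introduce a type annotation or a bound, so it is left alone. An illustrative input (still an error, but now recovered with a `::` suggestion):

    fn main() {
        let v = Vec:new();
        //         ^ error in expression position, with a suggestion to write `Vec::new()`
        let _w: Vec<u8> = v;  // type position: this `:` is an ordinary type annotation
    }
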