author     Oli Scherer <github35764891676564198441@oli-obk.de>    2024-12-15 05:35:37 +0000
committer  GitHub <noreply@github.com>                            2024-12-15 05:35:37 +0000
commit     e999c48404b492659f90993e9fdd2544c675abce
tree       57e9863e82b35af828fb9ea01e1067f656f8cf26 /compiler/rustc_parse
parent     d5322f75df7ec605bbcca5d9ec329e32bb696f69
parent     581989d1c6de8ece197b1317e3093203d3203d66
Merge pull request #4095 from rust-lang/rustup-2024-12-15
Automatic Rustup
Diffstat (limited to 'compiler/rustc_parse')
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs          40
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs   94
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs        23
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs         2
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs          19
5 files changed, 77 insertions(+), 101 deletions(-)
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 2426eb81678..443ddfc94ec 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -69,24 +69,30 @@ pub(crate) fn lex_token_trees<'psess, 'src>(
         token: Token::dummy(),
         diag_info: TokenTreeDiagInfo::default(),
     };
-    let (_open_spacing, stream, res) = lexer.lex_token_trees(/* is_delimited */ false);
-    let unmatched_delims = lexer.diag_info.unmatched_delims;
-
-    if res.is_ok() && unmatched_delims.is_empty() {
-        Ok(stream)
-    } else {
-        // Return error if there are unmatched delimiters or unclosed delimiters.
-        // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
-        // because the delimiter mismatch is more likely to be the root cause of error
-        let mut buffer: Vec<_> = unmatched_delims
-            .into_iter()
-            .filter_map(|unmatched_delim| make_unclosed_delims_error(unmatched_delim, psess))
-            .collect();
-        if let Err(errs) = res {
-            // Add unclosing delimiter or diff marker errors
-            buffer.extend(errs);
+    let res = lexer.lex_token_trees(/* is_delimited */ false);
+
+    let mut unmatched_delims: Vec<_> = lexer
+        .diag_info
+        .unmatched_delims
+        .into_iter()
+        .filter_map(|unmatched_delim| make_unclosed_delims_error(unmatched_delim, psess))
+        .collect();
+
+    match res {
+        Ok((_open_spacing, stream)) => {
+            if unmatched_delims.is_empty() {
+                Ok(stream)
+            } else {
+                // Return error if there are unmatched delimiters or unclosed delimiters.
+                Err(unmatched_delims)
+            }
+        }
+        Err(errs) => {
+            // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
+            // because the delimiter mismatch is more likely to be the root cause of error
+            unmatched_delims.extend(errs);
+            Err(unmatched_delims)
         }
-        Err(buffer)
     }
 }
 
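A note on the lexer/mod.rs change above: `lex_token_trees` no longer returns a `(Spacing, TokenStream, Result<..>)` tuple; success and failure are now mutually exclusive in a `Result<(Spacing, TokenStream), Vec<Diag>>`, and the unmatched-delimiter diagnostics are collected up front and merged into the error path. A minimal sketch of that refactoring pattern, using stand-in types rather than the real compiler types:

    // Stand-in types for illustration only (not rustc's Spacing/TokenStream/Diag).
    struct Spacing;
    struct TokenStream;
    struct Diag;

    // Before: success data travelled together with an embedded Result, so
    // callers had to look at both the stream and the error list.
    #[allow(dead_code)]
    fn lex_before() -> (Spacing, TokenStream, Result<(), Vec<Diag>>) {
        (Spacing, TokenStream, Ok(()))
    }

    // After: errors short-circuit, and `?` propagates them to the caller.
    fn lex_after() -> Result<(Spacing, TokenStream), Vec<Diag>> {
        Ok((Spacing, TokenStream))
    }

    fn caller() -> Result<TokenStream, Vec<Diag>> {
        let (_spacing, stream) = lex_after()?;
        Ok(stream)
    }

    fn main() {
        assert!(caller().is_ok());
    }
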
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index ee38f16d4ec..b3f83a32024 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -1,12 +1,10 @@
 use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::token_to_string;
-use rustc_errors::{Applicability, Diag};
-use rustc_span::symbol::kw;
+use rustc_errors::Diag;
 
 use super::diagnostics::{report_suspicious_mismatch_block, same_indentation_level};
 use super::{Lexer, UnmatchedDelim};
-use crate::Parser;
 
 impl<'psess, 'src> Lexer<'psess, 'src> {
     // Lex into a token stream. The `Spacing` in the result is that of the
@@ -14,7 +12,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
     pub(super) fn lex_token_trees(
         &mut self,
         is_delimited: bool,
-    ) -> (Spacing, TokenStream, Result<(), Vec<Diag<'psess>>>) {
+    ) -> Result<(Spacing, TokenStream), Vec<Diag<'psess>>> {
         // Move past the opening delimiter.
         let open_spacing = self.bump_minimal();
 
@@ -27,25 +25,25 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                     debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
                     buf.push(match self.lex_token_tree_open_delim(delim) {
                         Ok(val) => val,
-                        Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
+                        Err(errs) => return Err(errs),
                     })
                 }
                 token::CloseDelim(delim) => {
                     // Invisible delimiters cannot occur here because `TokenTreesReader` parses
                     // code directly from strings, with no macro expansion involved.
                     debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
-                    return (
-                        open_spacing,
-                        TokenStream::new(buf),
-                        if is_delimited { Ok(()) } else { Err(vec![self.close_delim_err(delim)]) },
-                    );
+                    return if is_delimited {
+                        Ok((open_spacing, TokenStream::new(buf)))
+                    } else {
+                        Err(vec![self.close_delim_err(delim)])
+                    };
                 }
                 token::Eof => {
-                    return (
-                        open_spacing,
-                        TokenStream::new(buf),
-                        if is_delimited { Err(vec![self.eof_err()]) } else { Ok(()) },
-                    );
+                    return if is_delimited {
+                        Err(vec![self.eof_err()])
+                    } else {
+                        Ok((open_spacing, TokenStream::new(buf)))
+                    };
                 }
                 _ => {
                     // Get the next normal token.
@@ -107,10 +105,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         // Lex the token trees within the delimiters.
         // We stop at any delimiter so we can try to recover if the user
         // uses an incorrect delimiter.
-        let (open_spacing, tts, res) = self.lex_token_trees(/* is_delimited */ true);
-        if let Err(errs) = res {
-            return Err(self.unclosed_delim_err(tts, errs));
-        }
+        let (open_spacing, tts) = self.lex_token_trees(/* is_delimited */ true)?;
 
         // Expand to cover the entire delimited token tree.
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
@@ -247,67 +242,6 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         this_spacing
     }
 
-    fn unclosed_delim_err(
-        &mut self,
-        tts: TokenStream,
-        mut errs: Vec<Diag<'psess>>,
-    ) -> Vec<Diag<'psess>> {
-        // If there are unclosed delims, see if there are diff markers and if so, point them
-        // out instead of complaining about the unclosed delims.
-        let mut parser = Parser::new(self.psess, tts, None);
-        let mut diff_errs = vec![];
-        // Suggest removing a `{` we think appears in an `if`/`while` condition.
-        // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition,
-        // but we have no way of tracking this in the lexer itself, so we piggyback on the parser.
-        let mut in_cond = false;
-        while parser.token != token::Eof {
-            if let Err(diff_err) = parser.err_vcs_conflict_marker() {
-                diff_errs.push(diff_err);
-            } else if parser.is_keyword_ahead(0, &[kw::If, kw::While]) {
-                in_cond = true;
-            } else if matches!(
-                parser.token.kind,
-                token::CloseDelim(Delimiter::Brace) | token::FatArrow
-            ) {
-                // End of the `if`/`while` body, or the end of a `match` guard.
-                in_cond = false;
-            } else if in_cond && parser.token == token::OpenDelim(Delimiter::Brace) {
-                // Store the `&&` and `let` to use their spans later when creating the diagnostic
-                let maybe_andand = parser.look_ahead(1, |t| t.clone());
-                let maybe_let = parser.look_ahead(2, |t| t.clone());
-                if maybe_andand == token::OpenDelim(Delimiter::Brace) {
-                    // This might be the beginning of the `if`/`while` body (i.e., the end of the
-                    // condition).
-                    in_cond = false;
-                } else if maybe_andand == token::AndAnd && maybe_let.is_keyword(kw::Let) {
-                    let mut err = parser.dcx().struct_span_err(
-                        parser.token.span,
-                        "found a `{` in the middle of a let-chain",
-                    );
-                    err.span_suggestion(
-                        parser.token.span,
-                        "consider removing this brace to parse the `let` as part of the same chain",
-                        "",
-                        Applicability::MachineApplicable,
-                    );
-                    err.span_label(
-                        maybe_andand.span.to(maybe_let.span),
-                        "you might have meant to continue the let-chain here",
-                    );
-                    errs.push(err);
-                }
-            }
-            parser.bump();
-        }
-        if !diff_errs.is_empty() {
-            for err in errs {
-                err.cancel();
-            }
-            return diff_errs;
-        }
-        errs
-    }
-
     fn close_delim_err(&mut self, delim: Delimiter) -> Diag<'psess> {
         // An unexpected closing delimiter (i.e., there is no matching opening delimiter).
         let token_str = token_to_string(&self.token);
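A note on the tokentrees.rs change above: besides adopting the new `Result`-returning signature (which lets `lex_token_tree_open_delim` propagate errors with `?`), it deletes `unclosed_delim_err`, the lexer-side recovery that re-scanned the tokens with a throwaway `Parser` to surface VCS conflict markers and a stray `{` inside an `if`/`while` let-chain condition. For context, an illustration of the situation that recovery diagnosed (not compiler code; the let-chain needs edition 2024 or the nightly `let_chains` feature):

    // The intended form: both `let`s belong to one `if` condition.
    fn demo(a: Option<i32>, b: Option<i32>) -> i32 {
        if let Some(x) = a && let Some(y) = b {
            return x + y;
        }
        // The deleted helper fired on malformed input shaped like
        //     if let Some(x) = a { && let Some(y) = b { ... }
        // and suggested removing the stray `{` before `&&` so the condition
        // parses as the single chain above.
        0
    }

    fn main() {
        assert_eq!(demo(Some(1), Some(2)), 3);
    }
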
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 44f42e5fbf2..a2136399b0c 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -15,7 +15,7 @@ use rustc_ast::visit::{Visitor, walk_expr};
 use rustc_ast::{
     self as ast, AnonConst, Arm, AttrStyle, AttrVec, BinOp, BinOpKind, BlockCheckMode, CaptureBy,
     ClosureBinder, DUMMY_NODE_ID, Expr, ExprField, ExprKind, FnDecl, FnRetTy, Label, MacCall,
-    MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind, UnOp,
+    MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind, UnOp, UnsafeBinderCastKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::stack::ensure_sufficient_stack;
@@ -1931,6 +1931,12 @@ impl<'a> Parser<'a> {
             Ok(match ident.name {
                 sym::offset_of => Some(this.parse_expr_offset_of(lo)?),
                 sym::type_ascribe => Some(this.parse_expr_type_ascribe(lo)?),
+                sym::wrap_binder => {
+                    Some(this.parse_expr_unsafe_binder_cast(lo, UnsafeBinderCastKind::Wrap)?)
+                }
+                sym::unwrap_binder => {
+                    Some(this.parse_expr_unsafe_binder_cast(lo, UnsafeBinderCastKind::Unwrap)?)
+                }
                 _ => None,
             })
         })
@@ -2006,6 +2012,17 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(span, ExprKind::Type(expr, ty)))
     }
 
+    pub(crate) fn parse_expr_unsafe_binder_cast(
+        &mut self,
+        lo: Span,
+        kind: UnsafeBinderCastKind,
+    ) -> PResult<'a, P<Expr>> {
+        let expr = self.parse_expr()?;
+        let ty = if self.eat(&TokenKind::Comma) { Some(self.parse_ty()?) } else { None };
+        let span = lo.to(self.token.span);
+        Ok(self.mk_expr(span, ExprKind::UnsafeBinderCast(kind, expr, ty)))
+    }
+
     /// Returns a string literal if the next token is a string literal.
     /// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind,
     /// and returns `None` if the next token is not literal at all.
@@ -4016,7 +4033,9 @@ impl MutVisitor for CondChecker<'_> {
                 mut_visit::walk_expr(self, e);
                 self.forbid_let_reason = forbid_let_reason;
             }
-            ExprKind::Cast(ref mut op, _) | ExprKind::Type(ref mut op, _) => {
+            ExprKind::Cast(ref mut op, _)
+            | ExprKind::Type(ref mut op, _)
+            | ExprKind::UnsafeBinderCast(_, ref mut op, _) => {
                 let forbid_let_reason = self.forbid_let_reason;
                 self.forbid_let_reason = Some(OtherForbidden);
                 self.visit_expr(op);
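A note on the expr.rs change above: `wrap_binder` and `unwrap_binder` join `offset_of` and `type_ascribe` in the builtin-syntax parser and build `ExprKind::UnsafeBinderCast`, with an optional type after a comma (see `parse_expr_unsafe_binder_cast`); `CondChecker` now also forbids `let` inside such casts, just like `Cast` and `Type`. The sketch below is inferred from those arms rather than confirmed usage: it assumes the `builtin #` surface form (feature `builtin_syntax`) together with the `unsafe_binders` gate, and it may not compile end-to-end while the feature is experimental:

    // Hedged, nightly-only sketch of the grammar the new arms accept.
    #![feature(builtin_syntax, unsafe_binders)]

    fn demo(x: &i32) {
        // `builtin # wrap_binder(EXPR)` or `builtin # wrap_binder(EXPR, TYPE)`.
        let wrapped: unsafe<'a> &'a i32 = builtin # wrap_binder(x);
        // The optional trailing type mirrors `parse_expr_unsafe_binder_cast`.
        let _unwrapped: &i32 = builtin # unwrap_binder(wrapped, &i32);
    }

    fn main() {
        demo(&0);
    }
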
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 58b4bf8980b..e27fc963eb9 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -45,7 +45,7 @@ impl<'a> Parser<'a> {
             let (inner_attrs, items, inner_span) =
                 self.parse_mod(&token::CloseDelim(Delimiter::Brace))?;
             attrs.extend(inner_attrs);
-            ModKind::Loaded(items, Inline::Yes, inner_span)
+            ModKind::Loaded(items, Inline::Yes, inner_span, Ok(()))
         };
         Ok((id, ItemKind::Mod(safety, mod_kind)))
     }
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 8cff23c2e32..f696074e66a 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -5,7 +5,7 @@ use rustc_ast::{
     self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, DUMMY_NODE_ID, FnRetTy,
     GenericBound, GenericBounds, GenericParam, Generics, Lifetime, MacCall, MutTy, Mutability,
     Pinnedness, PolyTraitRef, PreciseCapturingArg, TraitBoundModifiers, TraitObjectSyntax, Ty,
-    TyKind,
+    TyKind, UnsafeBinderTy,
 };
 use rustc_errors::{Applicability, PResult};
 use rustc_span::symbol::{Ident, kw, sym};
@@ -348,6 +348,10 @@ impl<'a> Parser<'a> {
                     TyKind::Err(guar)
                 }
             }
+        } else if self.check_keyword(kw::Unsafe)
+            && self.look_ahead(1, |tok| matches!(tok.kind, token::Lt))
+        {
+            self.parse_unsafe_binder_ty()?
         } else {
             let msg = format!("expected type, found {}", super::token_descr(&self.token));
             let mut err = self.dcx().struct_span_err(lo, msg);
@@ -369,6 +373,19 @@ impl<'a> Parser<'a> {
         if allow_qpath_recovery { self.maybe_recover_from_bad_qpath(ty) } else { Ok(ty) }
     }
 
+    fn parse_unsafe_binder_ty(&mut self) -> PResult<'a, TyKind> {
+        let lo = self.token.span;
+        assert!(self.eat_keyword(kw::Unsafe));
+        self.expect_lt()?;
+        let generic_params = self.parse_generic_params()?;
+        self.expect_gt()?;
+        let inner_ty = self.parse_ty()?;
+        let span = lo.to(self.prev_token.span);
+        self.psess.gated_spans.gate(sym::unsafe_binders, span);
+
+        Ok(TyKind::UnsafeBinder(P(UnsafeBinderTy { generic_params, inner_ty })))
+    }
+
     /// Parses either:
     /// - `(TYPE)`, a parenthesized type.
     /// - `(TYPE,)`, a tuple with a single field of type TYPE.
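A note on the ty.rs change above: in type position, `unsafe` immediately followed by `<` now parses as an unsafe binder type, `unsafe<GENERIC_PARAMS> INNER_TYPE`, gated behind the `unsafe_binders` feature. A hedged sketch of the surface syntax this accepts (nightly-only and experimental; this hunk only adds the parsing, later phases may still reject or warn about it):

    #![feature(unsafe_binders)]

    // `unsafe<GENERIC_PARAMS> INNER_TYPE`: the generic parameters are bound by
    // the binder and scope over the inner type.
    type UnsafeRef = unsafe<'a> &'a i32;

    fn main() {}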