Diffstat (limited to 'compiler/rustc_parse/src'):
 compiler/rustc_parse/src/errors.rs             |  37
 compiler/rustc_parse/src/lexer/mod.rs          |  35
 compiler/rustc_parse/src/lib.rs                |   2
 compiler/rustc_parse/src/parser/attr.rs        |   2
 compiler/rustc_parse/src/parser/diagnostics.rs |   4
 compiler/rustc_parse/src/parser/expr.rs        | 164
 compiler/rustc_parse/src/parser/mod.rs         |   7
 compiler/rustc_parse/src/parser/pat.rs         |  56
 8 files changed, 226 insertions(+), 81 deletions(-)
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index e276b34ca37..0de252707bd 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -1065,8 +1065,8 @@ impl<'a> IntoDiagnostic<'a> for ExpectedIdentifier {
                 None => fluent::parse_expected_identifier_found_str,
             },
         );
-        diag.set_span(self.span);
-        diag.set_arg("token", self.token);
+        diag.span(self.span);
+        diag.arg("token", self.token);
 
         if let Some(sugg) = self.suggest_raw {
             sugg.add_to_diagnostic(&mut diag);
@@ -1123,8 +1123,8 @@ impl<'a> IntoDiagnostic<'a> for ExpectedSemi {
                 None => fluent::parse_expected_semi_found_str,
             },
         );
-        diag.set_span(self.span);
-        diag.set_arg("token", self.token);
+        diag.span(self.span);
+        diag.arg("token", self.token);
 
         if let Some(unexpected_token_label) = self.unexpected_token_label {
             diag.span_label(unexpected_token_label, fluent::parse_label_unexpected_token);
@@ -2379,6 +2379,27 @@ pub(crate) struct ExpectedCommaAfterPatternField {
 }
 
 #[derive(Diagnostic)]
+#[diag(parse_unexpected_paren_in_range_pat)]
+pub(crate) struct UnexpectedParenInRangePat {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: UnexpectedParenInRangePatSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_unexpected_paren_in_range_pat_sugg,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct UnexpectedParenInRangePatSugg {
+    #[suggestion_part(code = "")]
+    pub start_span: Span,
+    #[suggestion_part(code = "")]
+    pub end_span: Span,
+}
+
+#[derive(Diagnostic)]
 #[diag(parse_return_types_use_thin_arrow)]
 pub(crate) struct ReturnTypesUseThinArrow {
     #[primary_span]
@@ -2887,3 +2908,11 @@ pub(crate) struct TransposeDynOrImplSugg<'a> {
     pub insertion_span: Span,
     pub kw: &'a str,
 }
+
+#[derive(Diagnostic)]
+#[diag(parse_array_index_offset_of)]
+pub(crate) struct ArrayIndexInOffsetOf(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_offset_of)]
+pub(crate) struct InvalidOffsetOf(#[primary_span] pub Span);
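
The two offset_of diagnostics added here are emitted from the new `parse_floating_field_access` path in parser/expr.rs below. As a hedged sketch of the inputs they reject (assuming the unstable `offset_of` feature of that era and `std::mem::offset_of!` as the surface macro; the snippet is deliberately not compilable):

    #![feature(offset_of)] // assumption: offset_of! was still feature-gated at this point
    use std::mem::offset_of;

    #[repr(C)]
    struct S {
        arr: [u8; 4],
        x: u16,
    }

    fn main() {
        // parse_array_index_offset_of: indexing into an array is not a field access.
        let _ = offset_of!(S, arr[0]);
        // parse_invalid_offset_of: only a dot-separated chain of field/variant names is accepted.
        let _ = offset_of!(S, x + 1);
    }
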
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 083d1984e00..4819ed6021d 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -7,9 +7,7 @@ use rustc_ast::ast::{self, AttrStyle};
 use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
-use rustc_errors::{
-    error_code, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder, FatalAbort, StashKey,
-};
+use rustc_errors::{error_code, Applicability, DiagCtxt, Diagnostic, StashKey};
 use rustc_lexer::unescape::{self, EscapeError, Mode};
 use rustc_lexer::{Base, DocStyle, RawStrError};
 use rustc_lexer::{Cursor, LiteralKind};
@@ -252,7 +250,7 @@ impl<'a> StringReader<'a> {
                     if starts_with_number {
                         let span = self.mk_sp(start, self.pos);
                         let mut diag = self.dcx().struct_err("lifetimes cannot start with a number");
-                        diag.set_span(span);
+                        diag.span(span);
                         diag.stash(span, StashKey::LifetimeIsChar);
                     }
                     let ident = Symbol::intern(lifetime_name);
@@ -344,18 +342,6 @@ impl<'a> StringReader<'a> {
         token::Ident(sym, false)
     }
 
-    fn struct_fatal_span_char(
-        &self,
-        from_pos: BytePos,
-        to_pos: BytePos,
-        m: &str,
-        c: char,
-    ) -> DiagnosticBuilder<'a, FatalAbort> {
-        self.sess
-            .dcx
-            .struct_span_fatal(self.mk_sp(from_pos, to_pos), format!("{}: {}", m, escaped_char(c)))
-    }
-
     /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly
     /// complain about it.
     fn lint_unicode_text_flow(&self, start: BytePos) {
@@ -568,13 +554,16 @@ impl<'a> StringReader<'a> {
     }
 
     fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! {
-        self.struct_fatal_span_char(
-            start,
-            self.pos,
-            "found invalid character; only `#` is allowed in raw string delimitation",
-            bad_char,
-        )
-        .emit()
+        self.sess
+            .dcx
+            .struct_span_fatal(
+                self.mk_sp(start, self.pos),
+                format!(
+                    "found invalid character; only `#` is allowed in raw string delimitation: {}",
+                    escaped_char(bad_char)
+                ),
+            )
+            .emit()
     }
 
     fn report_unterminated_raw_string(
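
For reference, `report_non_started_raw_string` fires when the character after `r` and its `#`s is neither another `#` nor the opening quote. A hedged example of input that reaches it (rejected with a fatal error at lex time):

    fn main() {
        // `~` is not allowed in a raw string delimiter, so the lexer reports
        // "found invalid character; only `#` is allowed in raw string delimitation: ~"
        let _s = r#~"hello"#;
    }
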
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index f06aeed8628..579c3cffcfb 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -154,7 +154,7 @@ fn try_file_to_source_file(
         let msg = format!("couldn't read {}: {}", path.display(), e);
         let mut diag = Diagnostic::new(Level::Fatal, msg);
         if let Some(sp) = spanopt {
-            diag.set_span(sp);
+            diag.span(sp);
         }
         diag
     })
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index fb8ad05f25d..c8629069968 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -174,7 +174,7 @@ impl<'a> Parser<'a> {
         ) {
             Ok(Some(item)) => {
                 // FIXME(#100717)
-                err.set_arg("item", item.kind.descr());
+                err.arg("item", item.kind.descr());
                 err.span_label(item.span, fluent::parse_label_does_not_annotate_this);
                 err.span_suggestion_verbose(
                     replacement_span,
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 77bca2f138a..aed5e11133b 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -846,7 +846,7 @@ impl<'a> Parser<'a> {
             ) =>
             {
                 let n_hashes: u8 = *n_hashes;
-                err.set_primary_message("too many `#` when terminating raw string");
+                err.primary_message("too many `#` when terminating raw string");
                 let str_span = self.prev_token.span;
                 let mut span = self.token.span;
                 let mut count = 0;
@@ -857,7 +857,7 @@ impl<'a> Parser<'a> {
                     self.bump();
                     count += 1;
                 }
-                err.set_span(span);
+                err.span(span);
                 err.span_suggestion(
                     span,
                     format!("remove the extra `#`{}", pluralize!(count)),
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index bf6151b64d3..0b24e784126 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1023,7 +1023,7 @@ impl<'a> Parser<'a> {
     // we should break everything including floats into more basic proc-macro style
     // tokens in the lexer (probably preferable).
     // See also `TokenKind::break_two_token_op` which does similar splitting of `>>` into `>`.
-    fn break_up_float(&mut self, float: Symbol) -> DestructuredFloat {
+    fn break_up_float(&self, float: Symbol, span: Span) -> DestructuredFloat {
         #[derive(Debug)]
         enum FloatComponent {
             IdentLike(String),
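
For context on why floats need breaking up at all: nested tuple indexing lexes the index pair as a single float literal, which the parser must split back apart. A minimal, compilable sketch:

    fn main() {
        let pair = ((1u8, 2u8), 3u8);
        // `0.1` reaches the parser as one float token; `break_up_float` splits it
        // into `0`, `.`, `1` so this is parsed as two tuple field accesses.
        let inner_second = pair.0.1;
        assert_eq!(inner_second, 2);
    }
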
@@ -1053,7 +1053,6 @@ impl<'a> Parser<'a> {
         // With proc macros the span can refer to anything, the source may be too short,
         // or too long, or non-ASCII. It only makes sense to break our span into components
         // if its underlying text is identical to our float literal.
-        let span = self.token.span;
         let can_take_span_apart =
             || self.span_to_snippet(span).as_deref() == Ok(float_str).as_deref();
 
@@ -1115,7 +1114,7 @@ impl<'a> Parser<'a> {
         float: Symbol,
         suffix: Option<Symbol>,
     ) -> P<Expr> {
-        match self.break_up_float(float) {
+        match self.break_up_float(float, self.token.span) {
             // 1e2
             DestructuredFloat::Single(sym, _sp) => {
                 self.parse_expr_tuple_field_access(lo, base, sym, suffix, None)
@@ -1143,40 +1142,105 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_field_name_maybe_tuple(&mut self) -> PResult<'a, ThinVec<Ident>> {
-        let token::Literal(token::Lit { kind: token::Float, symbol, suffix }) = self.token.kind
-        else {
-            return Ok(thin_vec![self.parse_field_name()?]);
-        };
-        Ok(match self.break_up_float(symbol) {
-            // 1e2
-            DestructuredFloat::Single(sym, sp) => {
-                self.bump();
-                thin_vec![Ident::new(sym, sp)]
-            }
-            // 1.
-            DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => {
-                assert!(suffix.is_none());
-                // Analogous to `Self::break_and_eat`
-                self.break_last_token = true;
-                // This might work, in cases like `1. 2`, and might not,
-                // in cases like `offset_of!(Ty, 1.)`. It depends on what comes
-                // after the float-like token, and therefore we have to make
-                // the other parts of the parser think that there is a dot literal.
-                self.token = Token::new(token::Ident(sym, false), sym_span);
-                self.bump_with((Token::new(token::Dot, dot_span), self.token_spacing));
-                thin_vec![Ident::new(sym, sym_span)]
-            }
-            // 1.2 | 1.2e3
-            DestructuredFloat::MiddleDot(symbol1, ident1_span, _dot_span, symbol2, ident2_span) => {
-                self.bump();
-                thin_vec![Ident::new(symbol1, ident1_span), Ident::new(symbol2, ident2_span)]
+    /// Parse the field access used in offset_of, matched by `$(e:expr)+`.
+    /// Currently returns a list of idents. However, it should be possible in
+    /// future to also do array indices, which might be arbitrary expressions.
+    fn parse_floating_field_access(&mut self) -> PResult<'a, P<[Ident]>> {
+        let mut fields = Vec::new();
+        let mut trailing_dot = None;
+
+        loop {
+            // This is expected to use a metavariable $(args:expr)+, but the builtin syntax
+            // could be called directly. Calling `parse_expr` allows this function to only
+            // consider `Expr`s.
+            let expr = self.parse_expr()?;
+            let mut current = &expr;
+            let start_idx = fields.len();
+            loop {
+                match current.kind {
+                    ExprKind::Field(ref left, right) => {
+                        // Field access is read right-to-left.
+                        fields.insert(start_idx, right);
+                        trailing_dot = None;
+                        current = left;
+                    }
+                    // Parse this both to give helpful error messages and to
+                    // verify it can be done with this parser setup.
+                    ExprKind::Index(ref left, ref _right, span) => {
+                        self.dcx().emit_err(errors::ArrayIndexInOffsetOf(span));
+                        current = left;
+                    }
+                    ExprKind::Lit(token::Lit {
+                        kind: token::Float | token::Integer,
+                        symbol,
+                        suffix,
+                    }) => {
+                        if let Some(suffix) = suffix {
+                            self.expect_no_tuple_index_suffix(current.span, suffix);
+                        }
+                        match self.break_up_float(symbol, current.span) {
+                            // 1e2
+                            DestructuredFloat::Single(sym, sp) => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, Ident::new(sym, sp));
+                            }
+                            // 1.
+                            DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => {
+                                assert!(suffix.is_none());
+                                trailing_dot = Some(dot_span);
+                                fields.insert(start_idx, Ident::new(sym, sym_span));
+                            }
+                            // 1.2 | 1.2e3
+                            DestructuredFloat::MiddleDot(
+                                symbol1,
+                                span1,
+                                _dot_span,
+                                symbol2,
+                                span2,
+                            ) => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, Ident::new(symbol2, span2));
+                                fields.insert(start_idx, Ident::new(symbol1, span1));
+                            }
+                            DestructuredFloat::Error => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, Ident::new(symbol, self.prev_token.span));
+                            }
+                        }
+                        break;
+                    }
+                    ExprKind::Path(None, Path { ref segments, .. }) => {
+                        match &segments[..] {
+                            [PathSegment { ident, args: None, .. }] => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, *ident)
+                            }
+                            _ => {
+                                self.dcx().emit_err(errors::InvalidOffsetOf(current.span));
+                                break;
+                            }
+                        }
+                        break;
+                    }
+                    _ => {
+                        self.dcx().emit_err(errors::InvalidOffsetOf(current.span));
+                        break;
+                    }
+                }
             }
-            DestructuredFloat::Error => {
-                self.bump();
-                thin_vec![Ident::new(symbol, self.prev_token.span)]
+
+            if matches!(self.token.kind, token::CloseDelim(..) | token::Comma) {
+                break;
+            } else if trailing_dot.is_none() {
+                // This loop should only repeat if there is a trailing dot.
+                self.dcx().emit_err(errors::InvalidOffsetOf(self.token.span));
+                break;
             }
-        })
+        }
+        if let Some(dot) = trailing_dot {
+            self.dcx().emit_err(errors::InvalidOffsetOf(dot));
+        }
+        Ok(fields.into_iter().collect())
     }
 
     fn parse_expr_tuple_field_access(
@@ -1907,15 +1971,29 @@ impl<'a> Parser<'a> {
         let container = self.parse_ty()?;
         self.expect(&TokenKind::Comma)?;
 
-        let seq_sep = SeqSep { sep: Some(token::Dot), trailing_sep_allowed: false };
-        let (fields, _trailing, _recovered) = self.parse_seq_to_before_end(
-            &TokenKind::CloseDelim(Delimiter::Parenthesis),
-            seq_sep,
-            Parser::parse_field_name_maybe_tuple,
-        )?;
-        let fields = fields.into_iter().flatten().collect::<Vec<_>>();
+        let fields = self.parse_floating_field_access()?;
+        let trailing_comma = self.eat_noexpect(&TokenKind::Comma);
+
+        if let Err(mut e) =
+            self.expect_one_of(&[], &[TokenKind::CloseDelim(Delimiter::Parenthesis)])
+        {
+            if trailing_comma {
+                e.note("unexpected third argument to offset_of");
+            } else {
+                e.note("offset_of expects dot-separated field and variant names");
+            }
+            e.emit();
+        }
+
+        // Eat tokens until the macro call ends.
+        if self.may_recover() {
+            while !matches!(self.token.kind, token::CloseDelim(..) | token::Eof) {
+                self.bump();
+            }
+        }
+
         let span = lo.to(self.token.span);
-        Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields.into())))
+        Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields)))
     }
 
     /// Returns a string literal if the next token is a string literal.
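
Taken together, `parse_floating_field_access` and the rewritten `parse_expr_offset_of` accept a single dot-separated chain of field and variant names, including tuple indices that the lexer glued together into a float token. A hedged sketch of the accepted surface syntax (again assuming the unstable `offset_of` feature; whether later compilation stages accept nested chains may depend on additional gates):

    #![feature(offset_of)] // assumption
    use std::mem::offset_of;

    #[repr(C)]
    struct Inner(u8, u16);

    #[repr(C)]
    struct Outer {
        inner: Inner,
    }

    fn main() {
        // `inner.1` is parsed as the field chain ["inner", "1"].
        let _off = offset_of!(Outer, inner.1);
    }
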
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 3932d32a6e1..b201d36455e 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -925,9 +925,8 @@ impl<'a> Parser<'a> {
                 });
         }
 
-        expect_err.set_primary_message(
-            "closure bodies that contain statements must be surrounded by braces",
-        );
+        expect_err
+            .primary_message("closure bodies that contain statements must be surrounded by braces");
 
         let preceding_pipe_span = closure_spans.closing_pipe;
         let following_token_span = self.token.span;
@@ -951,7 +950,7 @@ impl<'a> Parser<'a> {
         );
         expect_err.span_note(second_note, "the closure body may be incorrectly delimited");
 
-        expect_err.set_span(vec![preceding_pipe_span, following_token_span]);
+        expect_err.span(vec![preceding_pipe_span, following_token_span]);
 
         let opening_suggestion_str = " {".to_string();
         let closing_suggestion_str = "}".to_string();
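
The reworded diagnostic above is the one produced when a closure body runs past a single expression without braces. A hedged example of code that triggers it (rejected, with a suggestion to brace the body between the closing `|` and the end of the last statement):

    fn main() {
        let num = 5;
        let _product = (1..num).reduce(|a, b|
            // A statement follows the first expression, so the parser asks for
            // braces around the whole closure body.
            println!("{a} * {b}");
            a * b
        );
    }
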
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index afbc2537578..7d17b1d4c4d 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -6,7 +6,8 @@ use crate::errors::{
     InclusiveRangeExtraEquals, InclusiveRangeMatchArrow, InclusiveRangeNoEnd, InvalidMutInPattern,
     PatternOnWrongSideOfAt, RefMutOrderIncorrect, RemoveLet, RepeatedMutInPattern,
     SwitchRefBoxOrder, TopLevelOrPatternNotAllowed, TopLevelOrPatternNotAllowedSugg,
-    TrailingVertNotAllowed, UnexpectedLifetimeInPattern, UnexpectedVertVertBeforeFunctionParam,
+    TrailingVertNotAllowed, UnexpectedLifetimeInPattern, UnexpectedParenInRangePat,
+    UnexpectedParenInRangePatSugg, UnexpectedVertVertBeforeFunctionParam,
     UnexpectedVertVertInPattern,
 };
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
@@ -579,6 +580,8 @@ impl<'a> Parser<'a> {
 
     /// Parse a tuple or parenthesis pattern.
     fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> {
+        let open_paren = self.token.span;
+
         let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| {
             p.parse_pat_allow_top_alt(
                 None,
@@ -591,7 +594,29 @@ impl<'a> Parser<'a> {
         // Here, `(pat,)` is a tuple pattern.
         // For backward compatibility, `(..)` is a tuple pattern as well.
         Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
-            PatKind::Paren(fields.into_iter().next().unwrap())
+            let pat = fields.into_iter().next().unwrap();
+            let close_paren = self.prev_token.span;
+
+            match &pat.kind {
+                // recover ranges with parentheses around the `(start)..`
+                PatKind::Lit(begin)
+                    if self.may_recover()
+                        && let Some(form) = self.parse_range_end() =>
+                {
+                    self.dcx().emit_err(UnexpectedParenInRangePat {
+                        span: vec![open_paren, close_paren],
+                        sugg: UnexpectedParenInRangePatSugg {
+                            start_span: open_paren,
+                            end_span: close_paren,
+                        },
+                    });
+
+                    self.parse_pat_range_begin_with(begin.clone(), form)?
+                }
+
+                // (pat) with optional parentheses
+                _ => PatKind::Paren(pat),
+            }
         } else {
             PatKind::Tuple(fields)
         })
@@ -794,11 +819,21 @@ impl<'a> Parser<'a> {
                 || t.can_begin_literal_maybe_minus() // e.g. `42`.
                 || t.is_whole_expr()
                 || t.is_lifetime() // recover `'a` instead of `'a'`
+                || (self.may_recover() // recover leading `(`
+                    && t.kind == token::OpenDelim(Delimiter::Parenthesis)
+                    && self.look_ahead(dist + 1, |t| t.kind != token::OpenDelim(Delimiter::Parenthesis))
+                    && self.is_pat_range_end_start(dist + 1))
             })
     }
 
+    /// Parse a range pattern end bound
     fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
-        if self.check_inline_const(0) {
+        // recover leading `(`
+        let open_paren = (self.may_recover()
+            && self.eat_noexpect(&token::OpenDelim(Delimiter::Parenthesis)))
+        .then_some(self.prev_token.span);
+
+        let bound = if self.check_inline_const(0) {
             self.parse_const_block(self.token.span, true)
         } else if self.check_path() {
             let lo = self.token.span;
@@ -814,7 +849,22 @@ impl<'a> Parser<'a> {
             Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path)))
         } else {
             self.parse_literal_maybe_minus()
+        }?;
+
+        // recover trailing `)`
+        if let Some(open_paren) = open_paren {
+            self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+
+            self.dcx().emit_err(UnexpectedParenInRangePat {
+                span: vec![open_paren, self.prev_token.span],
+                sugg: UnexpectedParenInRangePatSugg {
+                    start_span: open_paren,
+                    end_span: self.prev_token.span,
+                },
+            });
         }
+
+        Ok(bound)
     }
 
     /// Is this the start of a pattern beginning with a path?
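
The pattern-side recovery added in pat.rs handles stray parentheses around either bound of a range pattern; `UnexpectedParenInRangePat` carries both paren spans, and its machine-applicable multipart suggestion deletes them. A hedged example of inputs it recovers (each is still reported as an error):

    fn main() {
        match 5u8 {
            // Parenthesized start bound: recovered by parse_pat_tuple_or_parens.
            (0)..=3 => {}
            // Parenthesized end bound: recovered by parse_pat_range_end.
            4..=(9) => {}
            _ => {}
        }
    }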