Diffstat (limited to 'compiler/rustc_parse/src')
 compiler/rustc_parse/src/lexer/mod.rs                      | 41
 compiler/rustc_parse/src/lexer/unescape_error_reporting.rs |  2
 compiler/rustc_parse/src/parser/attr_wrapper.rs            |  2
 compiler/rustc_parse/src/parser/expr.rs                    | 16
 compiler/rustc_parse/src/parser/generics.rs                |  2
 compiler/rustc_parse/src/parser/mod.rs                     | 46
 compiler/rustc_parse/src/validate_attr.rs                  | 14
7 files changed, 79 insertions, 44 deletions
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index c6e6b46e455..1931ee5e528 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -9,8 +9,8 @@ use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
 use rustc_errors::{error_code, Applicability, Diagnostic, DiagnosticBuilder, StashKey};
 use rustc_lexer::unescape::{self, EscapeError, Mode};
-use rustc_lexer::Cursor;
 use rustc_lexer::{Base, DocStyle, RawStrError};
+use rustc_lexer::{Cursor, LiteralKind};
 use rustc_session::lint::builtin::{
     RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
 };
@@ -118,6 +118,7 @@ impl<'a> StringReader<'a> {
         let mut swallow_next_invalid = 0;
         // Skip trivial (whitespace & comments) tokens
         loop {
+            let str_before = self.cursor.as_str();
             let token = self.cursor.advance_token();
             let start = self.pos;
             self.pos = self.pos + BytePos(token.len);
@@ -165,10 +166,7 @@ impl<'a> StringReader<'a> {
                     continue;
                 }
                 rustc_lexer::TokenKind::Ident => {
-                    let sym = nfc_normalize(self.str_from(start));
-                    let span = self.mk_sp(start, self.pos);
-                    self.sess.symbol_gallery.insert(sym, span);
-                    token::Ident(sym, false)
+                    self.ident(start)
                 }
                 rustc_lexer::TokenKind::RawIdent => {
                     let sym = nfc_normalize(self.str_from(start + BytePos(2)));
@@ -182,10 +180,7 @@ impl<'a> StringReader<'a> {
                 }
                 rustc_lexer::TokenKind::UnknownPrefix => {
                     self.report_unknown_prefix(start);
-                    let sym = nfc_normalize(self.str_from(start));
-                    let span = self.mk_sp(start, self.pos);
-                    self.sess.symbol_gallery.insert(sym, span);
-                    token::Ident(sym, false)
+                    self.ident(start)
                 }
                 rustc_lexer::TokenKind::InvalidIdent
                     // Do not recover an identifier with emoji if the codepoint is a confusable
@@ -203,6 +198,27 @@ impl<'a> StringReader<'a> {
                         .push(span);
                     token::Ident(sym, false)
                 }
+                // split up (raw) c string literals to an ident and a string literal when edition < 2021.
+                rustc_lexer::TokenKind::Literal {
+                    kind: kind @ (LiteralKind::CStr { .. } | LiteralKind::RawCStr { .. }),
+                    suffix_start: _,
+                } if !self.mk_sp(start, self.pos).edition().at_least_rust_2021() => {
+                    let prefix_len = match kind {
+                        LiteralKind::CStr { .. } => 1,
+                        LiteralKind::RawCStr { .. } => 2,
+                        _ => unreachable!(),
+                    };
+
+                    // reset the state so that only the prefix ("c" or "cr")
+                    // was consumed.
+                    let lit_start = start + BytePos(prefix_len);
+                    self.pos = lit_start;
+                    self.cursor = Cursor::new(&str_before[prefix_len as usize..]);
+
+                    self.report_unknown_prefix(start);
+                    let prefix_span = self.mk_sp(start, lit_start);
+                    return (Token::new(self.ident(start), prefix_span), preceded_by_whitespace);
+                }
                 rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
                     let suffix_start = start + BytePos(suffix_start);
                     let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind);
@@ -317,6 +333,13 @@ impl<'a> StringReader<'a> {
         }
     }
 
+    fn ident(&self, start: BytePos) -> TokenKind {
+        let sym = nfc_normalize(self.str_from(start));
+        let span = self.mk_sp(start, self.pos);
+        self.sess.symbol_gallery.insert(sym, span);
+        token::Ident(sym, false)
+    }
+
     fn struct_fatal_span_char(
         &self,
         from_pos: BytePos,
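
A minimal sketch (not part of the patch; the macro name is hypothetical, edition 2018 assumed) of the pre-2021 behavior the literal-splitting hunk above preserves: before the 2021 prefix reservation, the `c` of a (raw) C-string literal is handed back as a plain identifier followed by an ordinary string literal, so macros matching two tokens keep working.

macro_rules! ident_then_str {
    ($i:ident $s:literal) => {
        $s
    };
}

fn main() {
    // Edition 2018: `c"hello"` reaches the macro as two tokens, the ident `c`
    // and the string literal "hello", so the matcher succeeds (the unknown
    // `c` prefix is still reported through the 2021-migration lint machinery).
    // Edition 2021 and later: `c"hello"` stays a single C-string literal
    // token, so this call would not match.
    let s: &str = ident_then_str!(c"hello");
    println!("{s}"); // prints `hello`
}
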
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index 461a34b67db..d1f852b1a40 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -27,7 +27,7 @@ pub(crate) fn emit_unescape_error(
         lit, span_with_quotes, mode, range, error
     );
     let last_char = || {
-        let c = lit[range.clone()].chars().rev().next().unwrap();
+        let c = lit[range.clone()].chars().next_back().unwrap();
         let span = span.with_lo(span.hi() - BytePos(c.len_utf8() as u32));
         (c, span)
     };
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index b579da098d8..4cc03664b47 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -107,7 +107,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         let tokens =
             std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
                 .chain((0..self.num_calls).map(|_| {
-                    let token = cursor_snapshot.next(cursor_snapshot.desugar_doc_comments);
+                    let token = cursor_snapshot.next();
                     (FlatToken::Token(token.0), token.1)
                 }))
                 .take(self.num_calls);
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 3ecdbc36248..b54cb8c5a0c 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -238,7 +238,7 @@ impl<'a> Parser<'a> {
                     _ => unreachable!(),
                 }
                 .into();
-                let invalid = format!("{}=", &sugg);
+                let invalid = format!("{sugg}=");
                 self.sess.emit_err(errors::InvalidComparisonOperator {
                     span: sp,
                     invalid: invalid.clone(),
@@ -3003,7 +3003,8 @@ impl<'a> Parser<'a> {
     fn is_do_catch_block(&self) -> bool {
         self.token.is_keyword(kw::Do)
             && self.is_keyword_ahead(1, &[kw::Catch])
-            && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
+            && self
+                .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
             && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
 
@@ -3013,7 +3014,8 @@ impl<'a> Parser<'a> {
 
     fn is_try_block(&self) -> bool {
         self.token.is_keyword(kw::Try)
-            && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
+            && self
+                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
             && self.token.uninterpolated_span().at_least_rust_2018()
     }
 
@@ -3032,10 +3034,14 @@ impl<'a> Parser<'a> {
             && ((
                 // `async move {`
                 self.is_keyword_ahead(1, &[kw::Move])
-                    && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
+                    && self.look_ahead(2, |t| {
+                        *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+                    })
             ) || (
                 // `async {`
-                self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
+                self.look_ahead(1, |t| {
+                    *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+                })
             ))
     }
 
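
A minimal sketch (not from this patch; the macro name is hypothetical, edition 2018 or later assumed) of the input the widened `look_ahead` checks above now accept: a block passed through a macro arrives as a single interpolated token rather than a literal `{`, which `t.is_whole_block()` recognizes after `async` (and likewise for `try` and `do catch`).

macro_rules! async_wrap {
    ($b:block) => {
        // `$b` is one interpolated block token here, not an open brace;
        // the parser still treats `async $b` as an async block.
        async $b
    };
}

fn main() {
    let fut = async_wrap!({ 1 + 1 });
    let _ = fut; // a future that yields 2 when awaited
}
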
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 8ab38c4fb8b..242c9d332bb 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -49,7 +49,7 @@ impl<'a> Parser<'a> {
             && self.check_ident()
         // `Const` followed by IDENT
         {
-            return Ok(self.recover_const_param_with_mistyped_const(preceding_attrs, ident)?);
+            return self.recover_const_param_with_mistyped_const(preceding_attrs, ident);
         }
 
         // Parse optional colon and param bounds.
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 2e1a61e634e..37b4c371c94 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -138,7 +138,6 @@ pub struct Parser<'a> {
     // Important: This must only be advanced from `bump` to ensure that
     // `token_cursor.num_next_calls` is updated properly.
     token_cursor: TokenCursor,
-    desugar_doc_comments: bool,
     /// This field is used to keep track of how many left angle brackets we have seen. This is
     /// required in order to detect extra leading left angle brackets (`<` characters) and error
     /// appropriately.
@@ -225,6 +224,9 @@ struct TokenCursor {
     // because it's the outermost token stream which never has delimiters.
     stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
 
+    // We need to desugar doc comments from `/// foo` form into `#[doc =
+    // r"foo"]` form when parsing declarative macro inputs in `parse_tt`,
+    // because some declarative macros look for `doc` attributes.
     desugar_doc_comments: bool,
 
     // Counts the number of calls to `{,inlined_}next`.
@@ -255,33 +257,38 @@ struct TokenCursor {
 }
 
 impl TokenCursor {
-    fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
-        self.inlined_next(desugar_doc_comments)
+    fn next(&mut self) -> (Token, Spacing) {
+        self.inlined_next()
     }
 
     /// This always-inlined version should only be used on hot code paths.
     #[inline(always)]
-    fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
+    fn inlined_next(&mut self) -> (Token, Spacing) {
         loop {
             // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
             // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
             // removed.
             if let Some(tree) = self.tree_cursor.next_ref() {
                 match tree {
-                    &TokenTree::Token(ref token, spacing) => match (desugar_doc_comments, token) {
-                        (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
-                            let desugared = self.desugar(attr_style, data, span);
-                            self.tree_cursor.replace_prev_and_rewind(desugared);
-                            // Continue to get the first token of the desugared doc comment.
-                        }
-                        _ => {
-                            debug_assert!(!matches!(
-                                token.kind,
-                                token::OpenDelim(_) | token::CloseDelim(_)
-                            ));
-                            return (token.clone(), spacing);
+                    &TokenTree::Token(ref token, spacing) => {
+                        match (self.desugar_doc_comments, token) {
+                            (
+                                true,
+                                &Token { kind: token::DocComment(_, attr_style, data), span },
+                            ) => {
+                                let desugared = self.desugar(attr_style, data, span);
+                                self.tree_cursor.replace_prev_and_rewind(desugared);
+                                // Continue to get the first token of the desugared doc comment.
+                            }
+                            _ => {
+                                debug_assert!(!matches!(
+                                    token.kind,
+                                    token::OpenDelim(_) | token::CloseDelim(_)
+                                ));
+                                return (token.clone(), spacing);
+                            }
                         }
-                    },
+                    }
                     &TokenTree::Delimited(sp, delim, ref tts) => {
                         let trees = tts.clone().into_trees();
                         self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
@@ -463,7 +470,6 @@ impl<'a> Parser<'a> {
                 desugar_doc_comments,
                 break_last_token: false,
             },
-            desugar_doc_comments,
             unmatched_angle_bracket_count: 0,
             max_angle_bracket_count: 0,
             last_unexpected_token_span: None,
@@ -1107,7 +1113,7 @@ impl<'a> Parser<'a> {
     pub fn bump(&mut self) {
         // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
         // than `.0`/`.1` access.
-        let mut next = self.token_cursor.inlined_next(self.desugar_doc_comments);
+        let mut next = self.token_cursor.inlined_next();
         self.token_cursor.num_next_calls += 1;
         // We've retrieved an token from the underlying
         // cursor, so we no longer need to worry about
@@ -1157,7 +1163,7 @@ impl<'a> Parser<'a> {
         let mut i = 0;
         let mut token = Token::dummy();
         while i < dist {
-            token = cursor.next(/* desugar_doc_comments */ false).0;
+            token = cursor.next().0;
             if matches!(
                 token.kind,
                 token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
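
A minimal sketch (not part of the patch; the macro name is hypothetical) of why `TokenCursor` keeps the `desugar_doc_comments` flag documented above: while matching declarative macro input, `/// text` is desugared to the `#[doc = r" text"]` form, so a matcher that looks for a `doc` attribute can see the doc comment.

macro_rules! doc_of {
    (#[doc = $text:literal] fn $name:ident() {}) => {
        $text
    };
}

fn main() {
    let doc: &str = doc_of! {
        /// hello from a doc comment
        fn marker() {}
    };
    // prints ` hello from a doc comment` (note the leading space kept by the desugaring)
    println!("{doc}");
}
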
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index 928fdce313d..2011083019c 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -157,15 +157,15 @@ fn emit_malformed_attribute(
         matches!(name, sym::doc | sym::ignore | sym::inline | sym::link | sym::test | sym::bench)
     };
 
-    let error_msg = format!("malformed `{}` attribute input", name);
+    let error_msg = format!("malformed `{name}` attribute input");
     let mut msg = "attribute must be of the form ".to_owned();
     let mut suggestions = vec![];
     let mut first = true;
     let inner = if style == ast::AttrStyle::Inner { "!" } else { "" };
     if template.word {
         first = false;
-        let code = format!("#{}[{}]", inner, name);
-        msg.push_str(&format!("`{}`", &code));
+        let code = format!("#{inner}[{name}]");
+        msg.push_str(&format!("`{code}`"));
         suggestions.push(code);
     }
     if let Some(descr) = template.list {
@@ -173,16 +173,16 @@ fn emit_malformed_attribute(
             msg.push_str(" or ");
         }
         first = false;
-        let code = format!("#{}[{}({})]", inner, name, descr);
-        msg.push_str(&format!("`{}`", &code));
+        let code = format!("#{inner}[{name}({descr})]");
+        msg.push_str(&format!("`{code}`"));
         suggestions.push(code);
     }
     if let Some(descr) = template.name_value_str {
         if !first {
             msg.push_str(" or ");
         }
-        let code = format!("#{}[{} = \"{}\"]", inner, name, descr);
-        msg.push_str(&format!("`{}`", &code));
+        let code = format!("#{inner}[{name} = \"{descr}\"]");
+        msg.push_str(&format!("`{code}`"));
         suggestions.push(code);
     }
     if should_warn(name) {
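
The `format!` replacements in this file (and in expr.rs above) are mechanical conversions to captured identifiers in format strings, stable since Rust 1.58. A minimal sketch of the equivalence, with made-up values:

fn main() {
    let inner = "!";
    let name = "doc";
    // before: format!("#{}[{}]", inner, name)
    let code = format!("#{inner}[{name}]");
    assert_eq!(code, "#![doc]");
}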