author      bors <bors@rust-lang.org>    2019-05-25 04:10:07 +0000
committer   bors <bors@rust-lang.org>    2019-05-25 04:10:07 +0000
commit      315ab95a9c13cbb69ae8538fcd69b9f7b0c30f89 (patch)
tree        11c1b9e9c76d0de8760e02fdca7690e9b8bfd0de /src/libsyntax/parse
parent      524580312039e4fa5ccf91e8f7093cd755bc1aad (diff)
parent      19b5a103461c7bce5d53db64380360a684c1ce7d (diff)
Auto merge of #61150 - Centril:rollup-wmm7qga, r=Centril
Rollup of 13 pull requests

Successful merges:

 - #61026 (Tweak macro parse errors when reaching EOF during macro call parse)
 - #61095 (Update cargo)
 - #61096 (tidy: don't short-circuit on license error)
 - #61107 (Fix a couple docs typos)
 - #61110 (Revert edition-guide toolstate override)
 - #61111 (Fixed type-alias-bounds lint doc)
 - #61113 (Deprecate `FnBox`. `Box<dyn FnOnce()>` can be called directly, since 1.35)
 - #61116 (Remove the incorrect warning from README.md)
 - #61118 (Dont ICE on an attempt to use GAT without feature gate)
 - #61121 (improve debug-printing of scalars)
 - #61125 (Updated my mailmap entry)
 - #61134 (Annotate each `reverse_bits` with `#[must_use]`)
 - #61138 (Move async/await tests to their own folder)

Failed merges:

r? @ghost
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/diagnostics.rs  |  75
-rw-r--r--  src/libsyntax/parse/mod.rs          |  22
-rw-r--r--  src/libsyntax/parse/parser.rs       |  84
3 files changed, 111 insertions(+), 70 deletions(-)
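
Of the merged pull requests, only #61026 touches this directory: it threads an
optional `subparser_name` through `Parser::new` so that when a sub-parser for
macro arguments reaches EOF, the diagnostic names what ended and points at the
end of the macro call rather than at a dummy or following span. The snippet
below is a hypothetical reproduction (it intentionally fails to compile); the
exact wording depends on the value of `crate::MACRO_ARGUMENTS`, which is not
shown in this diff, so the comments only approximate the message.

    // The `$e:expr` fragment runs out of tokens, so the expression
    // sub-parser hits EOF while still inside the macro's arguments.
    macro_rules! take_expr {
        ($e:expr) => { $e };
    }

    fn main() {
        // With `subparser_name` set for macro-argument parsing, the error
        // can read roughly "expected expression, found end of macro
        // arguments" and point just past the call, instead of labelling a
        // dummy or unrelated span.
        let _x = take_expr!(1 +);
    }
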
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index 8ac5beb21b5..810acc9cc92 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -13,7 +13,7 @@ use crate::symbol::kw;
 use crate::ThinVec;
 use errors::{Applicability, DiagnosticBuilder};
 use log::debug;
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};
 
 pub trait RecoverQPath: Sized + 'static {
     const PATH_STYLE: PathStyle = PathStyle::Expr;
@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
 
         let mut path = ast::Path {
             segments: Vec::new(),
-            span: syntax_pos::DUMMY_SP,
+            span: DUMMY_SP,
         };
         self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
         path.span = ty_span.to(self.prev_span);
@@ -267,6 +267,58 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Create a `DiagnosticBuilder` for an unexpected token `t` and try to recover if it is a
+    /// closing delimiter.
+    pub fn unexpected_try_recover(
+        &mut self,
+        t: &token::Token,
+    ) -> PResult<'a, bool /* recovered */> {
+        let token_str = pprust::token_to_string(t);
+        let this_token_str = self.this_token_descr();
+        let (prev_sp, sp) = match (&self.token, self.subparser_name) {
+            // Point at the end of the macro call when reaching end of macro arguments.
+            (token::Token::Eof, Some(_)) => {
+                let sp = self.sess.source_map().next_point(self.span);
+                (sp, sp)
+            }
+            // We don't want to point at the following span after DUMMY_SP.
+            // This happens when the parser finds an empty TokenStream.
+            _ if self.prev_span == DUMMY_SP => (self.span, self.span),
+            // EOF, don't want to point at the following char, but rather the last token.
+            (token::Token::Eof, None) => (self.prev_span, self.span),
+            _ => (self.sess.source_map().next_point(self.prev_span), self.span),
+        };
+        let msg = format!(
+            "expected `{}`, found {}",
+            token_str,
+            match (&self.token, self.subparser_name) {
+                (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
+                _ => this_token_str,
+            },
+        );
+        let mut err = self.struct_span_err(sp, &msg);
+        let label_exp = format!("expected `{}`", token_str);
+        match self.recover_closing_delimiter(&[t.clone()], err) {
+            Err(e) => err = e,
+            Ok(recovered) => {
+                return Ok(recovered);
+            }
+        }
+        let cm = self.sess.source_map();
+        match (cm.lookup_line(prev_sp.lo()), cm.lookup_line(sp.lo())) {
+            (Ok(ref a), Ok(ref b)) if a.line == b.line => {
+                // When the spans are in the same line, it means that the only content
+                // between them is whitespace, point only at the found token.
+                err.span_label(sp, label_exp);
+            }
+            _ => {
+                err.span_label(prev_sp, label_exp);
+                err.span_label(sp, "unexpected token");
+            }
+        }
+        Err(err)
+    }
+
     /// Consume alternative await syntaxes like `await <expr>`, `await? <expr>`, `await(<expr>)`
     /// and `await { <expr> }`.
     crate fn parse_incorrect_await_syntax(
@@ -562,4 +614,23 @@ impl<'a> Parser<'a> {
         }
     }
 
+    crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
+        let (span, msg) = match (&self.token, self.subparser_name) {
+            (&token::Token::Eof, Some(origin)) => {
+                let sp = self.sess.source_map().next_point(self.span);
+                (sp, format!("expected expression, found end of {}", origin))
+            }
+            _ => (self.span, format!(
+                "expected expression, found {}",
+                self.this_token_descr(),
+            )),
+        };
+        let mut err = self.struct_span_err(span, &msg);
+        let sp = self.sess.source_map().start_point(self.span);
+        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
+            self.sess.expr_parentheses_needed(&mut err, *sp, None);
+        }
+        err.span_label(span, "expected expression");
+        err
+    }
 }
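
The `unexpected_try_recover` method extracted above keeps the labelling
heuristic previously inlined in `expect`: when the "expected here" position and
the offending token fall on the same source line, only the found token gets a
label. A minimal, self-contained sketch of that decision follows (plain line
and column numbers standing in for rustc's `Span`/`SourceMap`; the names are
illustrative only).

    // Simplified stand-in for the `lookup_line` comparison: same-line spans
    // get a single "expected" label at the found token, otherwise both
    // positions are labelled.
    #[derive(Clone, Copy)]
    struct Pos { line: usize, col: usize }

    fn labels(prev: Pos, found: Pos, expected: &str) -> Vec<(usize, usize, String)> {
        if prev.line == found.line {
            // Only whitespace can separate the two spans on one line, so a
            // single label at the found token is enough.
            vec![(found.line, found.col, format!("expected `{}`", expected))]
        } else {
            vec![
                (prev.line, prev.col, format!("expected `{}`", expected)),
                (found.line, found.col, "unexpected token".to_string()),
            ]
        }
    }

    fn main() {
        assert_eq!(labels(Pos { line: 3, col: 8 }, Pos { line: 3, col: 10 }, ";").len(), 1);
        assert_eq!(labels(Pos { line: 3, col: 8 }, Pos { line: 4, col: 1 }, ";").len(), 2);
    }
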
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 1073fc6f3ab..f7a7aba9ecb 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -236,7 +236,7 @@ fn maybe_source_file_to_parser(
 ) -> Result<Parser<'_>, Vec<Diagnostic>> {
     let end_pos = source_file.end_pos;
     let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
-    let mut parser = stream_to_parser(sess, stream);
+    let mut parser = stream_to_parser(sess, stream, None);
     parser.unclosed_delims = unclosed_delims;
     if parser.token == token::Eof && parser.span.is_dummy() {
         parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
@@ -248,7 +248,7 @@ fn maybe_source_file_to_parser(
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
 pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
-    stream_to_parser(sess, tts.into_iter().collect())
+    stream_to_parser(sess, tts.into_iter().collect(), crate::MACRO_ARGUMENTS)
 }
 
 
@@ -328,8 +328,12 @@ pub fn maybe_file_to_stream(
 }
 
 /// Given stream and the `ParseSess`, produces a parser.
-pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
-    Parser::new(sess, stream, None, true, false)
+pub fn stream_to_parser<'a>(
+    sess: &'a ParseSess,
+    stream: TokenStream,
+    subparser_name: Option<&'static str>,
+) -> Parser<'a> {
+    Parser::new(sess, stream, None, true, false, subparser_name)
 }
 
 /// Given stream, the `ParseSess` and the base directory, produces a parser.
@@ -343,10 +347,12 @@ pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
 /// The main usage of this function is outside of rustc, for those who uses
 /// libsyntax as a library. Please do not remove this function while refactoring
 /// just because it is not used in rustc codebase!
-pub fn stream_to_parser_with_base_dir<'a>(sess: &'a ParseSess,
-                                          stream: TokenStream,
-                                          base_dir: Directory<'a>) -> Parser<'a> {
-    Parser::new(sess, stream, Some(base_dir), true, false)
+pub fn stream_to_parser_with_base_dir<'a>(
+    sess: &'a ParseSess,
+    stream: TokenStream,
+    base_dir: Directory<'a>,
+) -> Parser<'a> {
+    Parser::new(sess, stream, Some(base_dir), true, false, None)
 }
 
 /// A sequence separator.
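
After this change, `new_parser_from_tts` tags its parser with
`crate::MACRO_ARGUMENTS`, while the source-file and base-directory entry points
pass `None`, so only token-tree sub-parses pick up the new EOF wording. Below
is a minimal, self-contained sketch of that pattern with assumed names (the
real value of `MACRO_ARGUMENTS` is not shown in this diff; "macro arguments" is
a guess).

    // Assumed value; in libsyntax the real constant lives at the crate root.
    const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");

    struct Parser {
        subparser_name: Option<&'static str>,
    }

    // Analogue of `stream_to_parser`: the caller says what kind of sub-parse
    // this is, or `None` for ordinary source code.
    fn stream_to_parser(subparser_name: Option<&'static str>) -> Parser {
        Parser { subparser_name }
    }

    impl Parser {
        // How EOF would be described in an error message.
        fn eof_descr(&self) -> String {
            match self.subparser_name {
                Some(origin) => format!("end of {}", origin),
                None => "<eof>".to_string(),
            }
        }
    }

    fn main() {
        assert_eq!(stream_to_parser(None).eof_descr(), "<eof>");
        assert_eq!(stream_to_parser(MACRO_ARGUMENTS).eof_descr(), "end of macro arguments");
    }
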
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 11c566b65e5..56951ae0801 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -51,7 +51,7 @@ use crate::symbol::{kw, sym, Symbol};
 use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
 use rustc_target::spec::abi::{self, Abi};
 use syntax_pos::{
-    Span, MultiSpan, BytePos, FileName,
+    BytePos, DUMMY_SP, FileName, MultiSpan, Span,
     hygiene::CompilerDesugaringKind,
 };
 use log::{debug, trace};
@@ -233,6 +233,8 @@ pub struct Parser<'a> {
     /// error.
     crate unclosed_delims: Vec<UnmatchedBrace>,
     last_unexpected_token_span: Option<Span>,
+    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
+    crate subparser_name: Option<&'static str>,
 }
 
 impl<'a> Drop for Parser<'a> {
@@ -309,7 +311,7 @@ impl TokenCursor {
                 self.frame = frame;
                 continue
             } else {
-                return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
+                return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP }
             };
 
             match self.frame.last_token {
@@ -533,17 +535,19 @@ enum TokenExpectType {
 }
 
 impl<'a> Parser<'a> {
-    pub fn new(sess: &'a ParseSess,
-               tokens: TokenStream,
-               directory: Option<Directory<'a>>,
-               recurse_into_file_modules: bool,
-               desugar_doc_comments: bool)
-               -> Self {
+    pub fn new(
+        sess: &'a ParseSess,
+        tokens: TokenStream,
+        directory: Option<Directory<'a>>,
+        recurse_into_file_modules: bool,
+        desugar_doc_comments: bool,
+        subparser_name: Option<&'static str>,
+    ) -> Self {
         let mut parser = Parser {
             sess,
             token: token::Whitespace,
-            span: syntax_pos::DUMMY_SP,
-            prev_span: syntax_pos::DUMMY_SP,
+            span: DUMMY_SP,
+            prev_span: DUMMY_SP,
             meta_var_span: None,
             prev_token_kind: PrevTokenKind::Other,
             restrictions: Restrictions::empty(),
@@ -568,6 +572,7 @@ impl<'a> Parser<'a> {
             max_angle_bracket_count: 0,
             unclosed_delims: Vec::new(),
             last_unexpected_token_span: None,
+            subparser_name,
         };
 
         let tok = parser.next_tok();
@@ -631,44 +636,13 @@ impl<'a> Parser<'a> {
     }
 
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, t: &token::Token) -> PResult<'a,  bool /* recovered */> {
+    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
                 Ok(false)
             } else {
-                let token_str = pprust::token_to_string(t);
-                let this_token_str = self.this_token_descr();
-                let mut err = self.fatal(&format!("expected `{}`, found {}",
-                                                  token_str,
-                                                  this_token_str));
-
-                let sp = if self.token == token::Token::Eof {
-                    // EOF, don't want to point at the following char, but rather the last token
-                    self.prev_span
-                } else {
-                    self.sess.source_map().next_point(self.prev_span)
-                };
-                let label_exp = format!("expected `{}`", token_str);
-                match self.recover_closing_delimiter(&[t.clone()], err) {
-                    Err(e) => err = e,
-                    Ok(recovered) => {
-                        return Ok(recovered);
-                    }
-                }
-                let cm = self.sess.source_map();
-                match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
-                    (Ok(ref a), Ok(ref b)) if a.line == b.line => {
-                        // When the spans are in the same line, it means that the only content
-                        // between them is whitespace, point only at the found token.
-                        err.span_label(self.span, label_exp);
-                    }
-                    _ => {
-                        err.span_label(sp, label_exp);
-                        err.span_label(self.span, "unexpected token");
-                    }
-                }
-                Err(err)
+                self.unexpected_try_recover(t)
             }
         } else {
             self.expect_one_of(slice::from_ref(t), &[])
@@ -812,7 +786,7 @@ impl<'a> Parser<'a> {
                     //   |                   expected one of 8 possible tokens here
                     err.span_label(self.span, label_exp);
                 }
-                _ if self.prev_span == syntax_pos::DUMMY_SP => {
+                _ if self.prev_span == DUMMY_SP => {
                     // Account for macro context where the previous span might not be
                     // available to avoid incorrect output (#54841).
                     err.span_label(self.span, "unexpected token");
@@ -2041,7 +2015,7 @@ impl<'a> Parser<'a> {
             path = self.parse_path(PathStyle::Type)?;
             path_span = path_lo.to(self.prev_span);
         } else {
-            path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP };
+            path = ast::Path { segments: Vec::new(), span: DUMMY_SP };
             path_span = self.span.to(self.span);
         }
 
@@ -2627,17 +2601,7 @@ impl<'a> Parser<'a> {
                         }
                         Err(mut err) => {
                             self.cancel(&mut err);
-                            let msg = format!("expected expression, found {}",
-                                              self.this_token_descr());
-                            let mut err = self.fatal(&msg);
-                            let sp = self.sess.source_map().start_point(self.span);
-                            if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow()
-                                .get(&sp)
-                            {
-                                self.sess.expr_parentheses_needed(&mut err, *sp, None);
-                            }
-                            err.span_label(self.span, "expected expression");
-                            return Err(err);
+                            return Err(self.expected_expression_found());
                         }
                     }
                 }
@@ -5592,7 +5556,7 @@ impl<'a> Parser<'a> {
                 where_clause: WhereClause {
                     id: ast::DUMMY_NODE_ID,
                     predicates: Vec::new(),
-                    span: syntax_pos::DUMMY_SP,
+                    span: DUMMY_SP,
                 },
                 span: span_lo.to(self.prev_span),
             })
@@ -5838,7 +5802,7 @@ impl<'a> Parser<'a> {
         let mut where_clause = WhereClause {
             id: ast::DUMMY_NODE_ID,
             predicates: Vec::new(),
-            span: syntax_pos::DUMMY_SP,
+            span: DUMMY_SP,
         };
 
         if !self.eat_keyword(kw::Where) {
@@ -7005,7 +6969,7 @@ impl<'a> Parser<'a> {
                             Ident::with_empty_ctxt(sym::warn_directory_ownership)),
                         tokens: TokenStream::empty(),
                         is_sugared_doc: false,
-                        span: syntax_pos::DUMMY_SP,
+                        span: DUMMY_SP,
                     };
                     attr::mark_known(&attr);
                     attrs.push(attr);
@@ -7013,7 +6977,7 @@ impl<'a> Parser<'a> {
                 Ok((id, ItemKind::Mod(module), Some(attrs)))
             } else {
                 let placeholder = ast::Mod {
-                    inner: syntax_pos::DUMMY_SP,
+                    inner: DUMMY_SP,
                     items: Vec::new(),
                     inline: false
                 };
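
Taken together, the parser.rs changes shrink `expect` to its fast path and move
the cold error construction into the new `unexpected_try_recover` and
`expected_expression_found` helpers in diagnostics.rs. The sketch below shows
that shape with toy token and error types (assumed names, not rustc's
`PResult`/`DiagnosticBuilder`); the `Ok(bool)` return mirrors the
`bool /* recovered */` convention used above.

    #[derive(Debug, PartialEq, Clone, Copy)]
    enum Token { Semi, Comma, Eof }

    struct Parser {
        tokens: Vec<Token>,
        pos: usize,
    }

    impl Parser {
        fn current(&self) -> Token {
            self.tokens.get(self.pos).copied().unwrap_or(Token::Eof)
        }

        // Fast path: bump past a matching token; otherwise defer to the
        // recovery/error helper. Ok(false) = matched, Ok(true) = recovered,
        // Err = diagnostic.
        fn expect(&mut self, t: Token) -> Result<bool, String> {
            if self.current() == t {
                self.pos += 1;
                Ok(false)
            } else {
                self.unexpected_try_recover(t)
            }
        }

        // Cold path: rustc first attempts closing-delimiter recovery here;
        // this sketch only builds the error message.
        fn unexpected_try_recover(&mut self, t: Token) -> Result<bool, String> {
            Err(format!("expected {:?}, found {:?}", t, self.current()))
        }
    }

    fn main() {
        let mut p = Parser { tokens: vec![Token::Semi], pos: 0 };
        assert_eq!(p.expect(Token::Semi), Ok(false));
        assert!(p.expect(Token::Comma).is_err());
    }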