author     bors <bors@rust-lang.org>  2019-05-25 04:10:07 +0000
committer  bors <bors@rust-lang.org>  2019-05-25 04:10:07 +0000
commit     315ab95a9c13cbb69ae8538fcd69b9f7b0c30f89 (patch)
tree       11c1b9e9c76d0de8760e02fdca7690e9b8bfd0de /src/libsyntax/parse/parser.rs
parent     524580312039e4fa5ccf91e8f7093cd755bc1aad (diff)
parent     19b5a103461c7bce5d53db64380360a684c1ce7d (diff)
Auto merge of #61150 - Centril:rollup-wmm7qga, r=Centril
Rollup of 13 pull requests

Successful merges:

 - #61026 (Tweak macro parse errors when reaching EOF during macro call parse; see the sketch after this message)
 - #61095 (Update cargo)
 - #61096 (tidy: don't short-circuit on license error)
 - #61107 (Fix a couple docs typos)
 - #61110 (Revert edition-guide toolstate override)
 - #61111 (Fixed type-alias-bounds lint doc)
 - #61113 (Deprecate `FnBox`. `Box<dyn FnOnce()>` can be called directly, since 1.35)
 - #61116 (Remove the incorrect warning from README.md)
 - #61118 (Don't ICE on an attempt to use GAT without feature gate)
 - #61121 (improve debug-printing of scalars)
 - #61125 (Updated my mailmap entry)
 - #61134 (Annotate each `reverse_bits` with `#[must_use]`)
 - #61138 (Move async/await tests to their own folder)

Failed merges:

r? @ghost
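
The only change from that list touching this file is #61026: `Parser` gains a
`subparser_name: Option<&'static str>` field and a matching parameter on
`Parser::new`, so that a parser created for a macro call (rather than for a
source file) can identify itself in end-of-file diagnostics. A minimal
caller-side sketch, assuming the in-crate paths of the time and an
illustrative label string; the real call sites are not part of this diff:

    use crate::parse::ParseSess;          // paths assumed from libsyntax of this era
    use crate::parse::parser::Parser;
    use crate::tokenstream::TokenStream;

    /// Hypothetical helper (not taken from this diff): build a sub-parser for
    /// the token stream of a macro call, labelling it so that hitting EOF can
    /// mention the macro call instead of a bare "unexpected end of file".
    fn macro_call_subparser<'a>(sess: &'a ParseSess, tts: TokenStream) -> Parser<'a> {
        Parser::new(
            sess,
            tts,
            None,                    // no Directory: the stream is not a file module
            false,                   // recurse_into_file_modules
            false,                   // desugar_doc_comments
            Some("macro arguments"), // new in #61026; this label text is illustrative
        )
    }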
Diffstat (limited to 'src/libsyntax/parse/parser.rs')
-rw-r--r--  src/libsyntax/parse/parser.rs  84
1 file changed, 24 insertions, 60 deletions
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 11c566b65e5..56951ae0801 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -51,7 +51,7 @@ use crate::symbol::{kw, sym, Symbol};
 use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
 use rustc_target::spec::abi::{self, Abi};
 use syntax_pos::{
-    Span, MultiSpan, BytePos, FileName,
+    BytePos, DUMMY_SP, FileName, MultiSpan, Span,
     hygiene::CompilerDesugaringKind,
 };
 use log::{debug, trace};
@@ -233,6 +233,8 @@ pub struct Parser<'a> {
     /// error.
     crate unclosed_delims: Vec<UnmatchedBrace>,
     last_unexpected_token_span: Option<Span>,
+    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
+    crate subparser_name: Option<&'static str>,
 }
 
 impl<'a> Drop for Parser<'a> {
@@ -309,7 +311,7 @@ impl TokenCursor {
                 self.frame = frame;
                 continue
             } else {
-                return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
+                return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP }
             };
 
             match self.frame.last_token {
@@ -533,17 +535,19 @@ enum TokenExpectType {
 }
 
 impl<'a> Parser<'a> {
-    pub fn new(sess: &'a ParseSess,
-               tokens: TokenStream,
-               directory: Option<Directory<'a>>,
-               recurse_into_file_modules: bool,
-               desugar_doc_comments: bool)
-               -> Self {
+    pub fn new(
+        sess: &'a ParseSess,
+        tokens: TokenStream,
+        directory: Option<Directory<'a>>,
+        recurse_into_file_modules: bool,
+        desugar_doc_comments: bool,
+        subparser_name: Option<&'static str>,
+    ) -> Self {
         let mut parser = Parser {
             sess,
             token: token::Whitespace,
-            span: syntax_pos::DUMMY_SP,
-            prev_span: syntax_pos::DUMMY_SP,
+            span: DUMMY_SP,
+            prev_span: DUMMY_SP,
             meta_var_span: None,
             prev_token_kind: PrevTokenKind::Other,
             restrictions: Restrictions::empty(),
@@ -568,6 +572,7 @@ impl<'a> Parser<'a> {
             max_angle_bracket_count: 0,
             unclosed_delims: Vec::new(),
             last_unexpected_token_span: None,
+            subparser_name,
         };
 
         let tok = parser.next_tok();
@@ -631,44 +636,13 @@ impl<'a> Parser<'a> {
     }
 
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, t: &token::Token) -> PResult<'a,  bool /* recovered */> {
+    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
                 Ok(false)
             } else {
-                let token_str = pprust::token_to_string(t);
-                let this_token_str = self.this_token_descr();
-                let mut err = self.fatal(&format!("expected `{}`, found {}",
-                                                  token_str,
-                                                  this_token_str));
-
-                let sp = if self.token == token::Token::Eof {
-                    // EOF, don't want to point at the following char, but rather the last token
-                    self.prev_span
-                } else {
-                    self.sess.source_map().next_point(self.prev_span)
-                };
-                let label_exp = format!("expected `{}`", token_str);
-                match self.recover_closing_delimiter(&[t.clone()], err) {
-                    Err(e) => err = e,
-                    Ok(recovered) => {
-                        return Ok(recovered);
-                    }
-                }
-                let cm = self.sess.source_map();
-                match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
-                    (Ok(ref a), Ok(ref b)) if a.line == b.line => {
-                        // When the spans are in the same line, it means that the only content
-                        // between them is whitespace, point only at the found token.
-                        err.span_label(self.span, label_exp);
-                    }
-                    _ => {
-                        err.span_label(sp, label_exp);
-                        err.span_label(self.span, "unexpected token");
-                    }
-                }
-                Err(err)
+                self.unexpected_try_recover(t)
             }
         } else {
             self.expect_one_of(slice::from_ref(t), &[])
@@ -812,7 +786,7 @@ impl<'a> Parser<'a> {
                     //   |                   expected one of 8 possible tokens here
                     err.span_label(self.span, label_exp);
                 }
-                _ if self.prev_span == syntax_pos::DUMMY_SP => {
+                _ if self.prev_span == DUMMY_SP => {
                     // Account for macro context where the previous span might not be
                     // available to avoid incorrect output (#54841).
                     err.span_label(self.span, "unexpected token");
@@ -2041,7 +2015,7 @@ impl<'a> Parser<'a> {
             path = self.parse_path(PathStyle::Type)?;
             path_span = path_lo.to(self.prev_span);
         } else {
-            path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP };
+            path = ast::Path { segments: Vec::new(), span: DUMMY_SP };
             path_span = self.span.to(self.span);
         }
 
@@ -2627,17 +2601,7 @@ impl<'a> Parser<'a> {
                         }
                         Err(mut err) => {
                             self.cancel(&mut err);
-                            let msg = format!("expected expression, found {}",
-                                              self.this_token_descr());
-                            let mut err = self.fatal(&msg);
-                            let sp = self.sess.source_map().start_point(self.span);
-                            if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow()
-                                .get(&sp)
-                            {
-                                self.sess.expr_parentheses_needed(&mut err, *sp, None);
-                            }
-                            err.span_label(self.span, "expected expression");
-                            return Err(err);
+                            return Err(self.expected_expression_found());
                         }
                     }
                 }
@@ -5592,7 +5556,7 @@ impl<'a> Parser<'a> {
                 where_clause: WhereClause {
                     id: ast::DUMMY_NODE_ID,
                     predicates: Vec::new(),
-                    span: syntax_pos::DUMMY_SP,
+                    span: DUMMY_SP,
                 },
                 span: span_lo.to(self.prev_span),
             })
@@ -5838,7 +5802,7 @@ impl<'a> Parser<'a> {
         let mut where_clause = WhereClause {
             id: ast::DUMMY_NODE_ID,
             predicates: Vec::new(),
-            span: syntax_pos::DUMMY_SP,
+            span: DUMMY_SP,
         };
 
         if !self.eat_keyword(kw::Where) {
@@ -7005,7 +6969,7 @@ impl<'a> Parser<'a> {
                             Ident::with_empty_ctxt(sym::warn_directory_ownership)),
                         tokens: TokenStream::empty(),
                         is_sugared_doc: false,
-                        span: syntax_pos::DUMMY_SP,
+                        span: DUMMY_SP,
                     };
                     attr::mark_known(&attr);
                     attrs.push(attr);
@@ -7013,7 +6977,7 @@ impl<'a> Parser<'a> {
                 Ok((id, ItemKind::Mod(module), Some(attrs)))
             } else {
                 let placeholder = ast::Mod {
-                    inner: syntax_pos::DUMMY_SP,
+                    inner: DUMMY_SP,
                     items: Vec::new(),
                     inline: false
                 };