| author | Esteban Küber <esteban@kuber.com.ar> | 2019-01-27 21:04:50 -0800 |
|---|---|---|
| committer | Esteban Küber <esteban@kuber.com.ar> | 2019-02-07 01:41:30 -0800 |
| commit | 7451cd8dc0b20c012fad1886b321c3dee2052577 (patch) | |
| tree | 564bb3390199f080413ec319481f3f37fa345054 | /src/libsyntax/parse |
| parent | 825f355c7483746f3a17166f34dfabe3b2df1741 (diff) | |
| download | rust-7451cd8dc0b20c012fad1886b321c3dee2052577.tar.gz, rust-7451cd8dc0b20c012fad1886b321c3dee2052577.zip | |
Deduplicate mismatched delimiter errors
Delay unmatched delimiter errors until after the parser has run, so that they can be deduplicated, and attempt to recover from them intelligently during parsing.
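For illustration only, here is a made-up snippet (not taken from the patch or its tests) of the kind of input this change targets; it intentionally does not compile:

```rust
// Hypothetical input with a mismatched delimiter: the `)` on the next-to-last
// line should have been `}`. With this change the lexer records the mismatch
// as an `UnmatchedBrace` instead of emitting an error on the spot; the error
// is then reported once, labelling the unclosed `{` ("un-closed delimiter"),
// the stray `)` ("incorrect close delimiter"), and, where the parser can
// recover, suggesting where the missing close delimiter may belong, rather
// than repeating the same error for every re-parse of the same tokens.
fn main() {
    let xs = vec![1, 2, 3];
    for x in xs {
        println!("{}", x);
    ) // error: incorrect close delimiter: `)`
}
```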
Diffstat (limited to 'src/libsyntax/parse')
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 11 |
| -rw-r--r-- | src/libsyntax/parse/lexer/tokentrees.rs | 30 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 57 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 261 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 30 |
5 files changed, 287 insertions, 102 deletions
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 2e3233c8ed8..d3fc1c03634 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -33,6 +33,15 @@ impl Default for TokenAndSpan {
     }
 }
 
+#[derive(Clone, Debug)]
+pub struct UnmatchedBrace {
+    pub expected_delim: token::DelimToken,
+    pub found_delim: token::DelimToken,
+    pub found_span: Span,
+    pub unclosed_span: Option<Span>,
+    pub candidate_span: Option<Span>,
+}
+
 pub struct StringReader<'a> {
     pub sess: &'a ParseSess,
     /// The absolute offset within the source_map of the next character to read
@@ -58,6 +67,7 @@ pub struct StringReader<'a> {
     span_src_raw: Span,
     /// Stack of open delimiters and their spans. Used for error message.
     open_braces: Vec<(token::DelimToken, Span)>,
+    crate unmatched_braces: Vec<UnmatchedBrace>,
     /// The type and spans for all braces
     ///
     /// Used only for error recovery when arriving to EOF with mismatched braces.
@@ -222,6 +232,7 @@ impl<'a> StringReader<'a> {
             span: syntax_pos::DUMMY_SP,
             span_src_raw: syntax_pos::DUMMY_SP,
             open_braces: Vec::new(),
+            unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
             override_span,
             last_unclosed_found_span: None,
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 7699d9eab22..0db36c84cdf 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,5 +1,5 @@
 use crate::print::pprust::token_to_string;
-use crate::parse::lexer::StringReader;
+use crate::parse::lexer::{StringReader, UnmatchedBrace};
 use crate::parse::{token, PResult};
 use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
@@ -101,38 +101,38 @@ impl<'a> StringReader<'a> {
                 }
                 // Incorrect delimiter.
                 token::CloseDelim(other) => {
-                    let token_str = token_to_string(&self.token);
+                    let mut unclosed_delimiter = None;
+                    let mut candidate = None;
                     if self.last_unclosed_found_span != Some(self.span) {
                         // do not complain about the same unclosed delimiter multiple times
                         self.last_unclosed_found_span = Some(self.span);
-                        let msg = format!("incorrect close delimiter: `{}`", token_str);
-                        let mut err = self.sess.span_diagnostic.struct_span_err(
-                            self.span,
-                            &msg,
-                        );
-                        err.span_label(self.span, "incorrect close delimiter");
                         // This is a conservative error: only report the last unclosed
                         // delimiter. The previous unclosed delimiters could actually be
                         // closed! The parser just hasn't gotten to them yet.
                         if let Some(&(_, sp)) = self.open_braces.last() {
-                            err.span_label(sp, "un-closed delimiter");
+                            unclosed_delimiter = Some(sp);
                         };
                         if let Some(current_padding) = sm.span_to_margin(self.span) {
                             for (brace, brace_span) in &self.open_braces {
                                 if let Some(padding) = sm.span_to_margin(*brace_span) {
                                     // high likelihood of these two corresponding
                                     if current_padding == padding && brace == &other {
-                                        err.span_label(
-                                            *brace_span,
-                                            "close delimiter possibly meant for this",
-                                        );
+                                        candidate = Some(*brace_span);
                                     }
                                 }
                             }
                         }
-                        err.emit();
+                        let (tok, _) = self.open_braces.pop().unwrap();
+                        self.unmatched_braces.push(UnmatchedBrace {
+                            expected_delim: tok,
+                            found_delim: other,
+                            found_span: self.span,
+                            unclosed_span: unclosed_delimiter,
+                            candidate_span: candidate,
+                        });
+                    } else {
+                        self.open_braces.pop();
                     }
-                    self.open_braces.pop().unwrap();
 
                     // If the incorrect delimiter matches an earlier opening
                     // delimiter, then don't consume it (it can be used to
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index c723d591f2f..52c7e774ab6 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -9,6 +9,7 @@ use crate::parse::parser::Parser;
 use crate::symbol::Symbol;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
+use crate::print::pprust::token_to_string;
 
 use rustc_data_structures::sync::{Lrc, Lock};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
@@ -136,15 +137,17 @@ pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: &
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
-pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess,
-                                    override_span: Option<Span>)
-                                    -> TokenStream {
+pub fn parse_stream_from_source_str(
+    name: FileName,
+    source: String,
+    sess: &ParseSess,
+    override_span: Option<Span>,
+) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
 /// Create a new parser from a source string
-pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-                                  -> Parser<'_> {
+pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
 }
 
@@ -195,12 +198,14 @@ fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Pars
 
 /// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
 /// initial token stream.
-fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
-    -> Result<Parser<'_>, Vec<Diagnostic>>
-{
+fn maybe_source_file_to_parser(
+    sess: &ParseSess,
+    source_file: Lrc<SourceFile>,
+) -> Result<Parser<'_>, Vec<Diagnostic>> {
     let end_pos = source_file.end_pos;
-    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
-
+    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
+    let mut parser = stream_to_parser(sess, stream);
+    parser.unclosed_delims = unclosed_delims;
     if parser.token == token::Eof && parser.span.is_dummy() {
         parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
     }
@@ -247,25 +252,43 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a source_file, produce a sequence of token-trees
-pub fn source_file_to_stream(sess: &ParseSess,
-                             source_file: Lrc<SourceFile>,
-                             override_span: Option<Span>) -> TokenStream {
+pub fn source_file_to_stream(
+    sess: &ParseSess,
+    source_file: Lrc<SourceFile>,
+    override_span: Option<Span>,
+) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
     panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
 }
 
 /// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
 /// parsing the token tream.
-pub fn maybe_file_to_stream(sess: &ParseSess,
-                            source_file: Lrc<SourceFile>,
-                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+pub fn maybe_file_to_stream(
+    sess: &ParseSess,
+    source_file: Lrc<SourceFile>,
+    override_span: Option<Span>,
+) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
     let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
     srdr.real_token();
 
     match srdr.parse_all_token_trees() {
-        Ok(stream) => Ok(stream),
+        Ok(stream) => Ok((stream, srdr.unmatched_braces)),
         Err(err) => {
             let mut buffer = Vec::with_capacity(1);
             err.buffer(&mut buffer);
+            for unmatched in srdr.unmatched_braces {
+                let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
+                    "incorrect close delimiter: `{}`",
+                    token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+                ));
+                db.span_label(unmatched.found_span, "incorrect close delimiter");
+                if let Some(sp) = unmatched.candidate_span {
+                    db.span_label(sp, "close delimiter possibly meant for this");
+                }
+                if let Some(sp) = unmatched.unclosed_span {
+                    db.span_label(sp, "un-closed delimiter");
+                }
+                db.buffer(&mut buffer);
+            }
             Err(buffer)
         }
     }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index cacdab980fa..2e605ab6544 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -35,7 +35,7 @@ use crate::ext::base::DummyResult;
 use crate::source_map::{self, SourceMap, Spanned, respan};
 use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
 use crate::parse::{self, SeqSep, classify, token};
-use crate::parse::lexer::TokenAndSpan;
+use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::token::DelimToken;
 use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
@@ -251,6 +251,8 @@ pub struct Parser<'a> {
     ///
     /// See the comments in the `parse_path_segment` function for more details.
     crate unmatched_angle_bracket_count: u32,
+    crate max_angle_bracket_count: u32,
+    crate unclosed_delims: Vec<UnmatchedBrace>,
 }
 
@@ -573,6 +575,8 @@ impl<'a> Parser<'a> {
             desugar_doc_comments,
             cfg_mods: true,
             unmatched_angle_bracket_count: 0,
+            max_angle_bracket_count: 0,
+            unclosed_delims: Vec::new(),
         };
 
         let tok = parser.next_tok();
@@ -642,11 +646,11 @@ impl<'a> Parser<'a> {
 
     /// Expect and consume the token t. Signal an error if
     /// the next token is not t.
-    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, ()> {
+    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
-                Ok(())
+                Ok(false)
             } else {
                 let token_str = pprust::token_to_string(t);
                 let this_token_str = self.this_token_descr();
@@ -661,6 +665,12 @@ impl<'a> Parser<'a> {
                     self.sess.source_map().next_point(self.prev_span)
                 };
                 let label_exp = format!("expected `{}`", token_str);
+                match self.recover_closing_delimiter(&[t.clone()], err) {
+                    Err(e) => err = e,
+                    Ok(recovered) => {
+                        return Ok(recovered);
+                    }
+                }
                 let cm = self.sess.source_map();
                 match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
                     (Ok(ref a), Ok(ref b)) if a.line == b.line => {
@@ -680,12 +690,62 @@ impl<'a> Parser<'a> {
         }
     }
 
+    fn recover_closing_delimiter(
+        &mut self,
+        tokens: &[token::Token],
+        mut err: DiagnosticBuilder<'a>,
+    ) -> PResult<'a, bool> {
+        let mut pos = None;
+        // we want to use the last closing delim that would apply
+        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
+            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
+                && Some(self.span) > unmatched.unclosed_span
+            {
+                pos = Some(i);
+            }
+        }
+        match pos {
+            Some(pos) => {
+                // Recover and assume that the detected unclosed delimiter was meant for
+                // this location. Emit the diagnostic and act as if the delimiter was
+                // present for the parser's sake.
+
+                // Don't attempt to recover from this unclosed delimiter more than once.
+                let unmatched = self.unclosed_delims.remove(pos);
+                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
+
+                // We want to suggest the inclusion of the closing delimiter where it makes
+                // the most sense, which is immediately after the last token:
+                //
+                //  {foo(bar {}}
+                //      -      ^ help: `)` may belong here
+                //      |
+                //      in order to close this...
+                if let Some(sp) = unmatched.unclosed_span {
+                    err.span_label(sp, "in order to close this...");
+                }
+                err.span_suggestion_short_with_applicability(
+                    self.sess.source_map().next_point(self.prev_span),
+                    &format!("{} may belong here", delim.to_string()),
+                    delim.to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit();
+                // self.expected_tokens.clear();  // reduce errors
+                Ok(true)
+            }
+            _ => Err(err),
+        }
+    }
+
     /// Expect next token to be edible or inedible token. If edible,
     /// then consume it; if inedible, then return without consuming
     /// anything. Signal a fatal error if next token is unexpected.
-    pub fn expect_one_of(&mut self,
-                         edible: &[token::Token],
-                         inedible: &[token::Token]) -> PResult<'a, ()>{
+    pub fn expect_one_of(
+        &mut self,
+        edible: &[token::Token],
+        inedible: &[token::Token],
+    ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
             // This might be a sign we need a connect method on Iterator.
@@ -705,10 +765,10 @@ impl<'a> Parser<'a> {
         }
         if edible.contains(&self.token) {
             self.bump();
-            Ok(())
+            Ok(false)
         } else if inedible.contains(&self.token) {
             // leave it in the input
-            Ok(())
+            Ok(false)
         } else {
             let mut expected = edible.iter()
                 .map(|x| TokenType::Token(x.clone()))
@@ -759,6 +819,15 @@ impl<'a> Parser<'a> {
             } else {
                 label_sp
             };
+            match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
+                TokenType::Token(t) => Some(t.clone()),
+                _ => None,
+            }).collect::<Vec<_>>(), err) {
+                Err(e) => err = e,
+                Ok(recovered) => {
+                    return Ok(recovered);
+                }
+            }
 
             let cm = self.sess.source_map();
             match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
@@ -1053,6 +1122,7 @@ impl<'a> Parser<'a> {
         if ate {
             // See doc comment for `unmatched_angle_bracket_count`.
             self.unmatched_angle_bracket_count += 1;
+            self.max_angle_bracket_count += 1;
             debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
         }
 
@@ -1093,14 +1163,30 @@ impl<'a> Parser<'a> {
         };
 
         match ate {
-            Some(x) => {
+            Some(_) => {
                 // See doc comment for `unmatched_angle_bracket_count`.
                 self.unmatched_angle_bracket_count -= 1;
                 debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
 
-                Ok(x)
+                Ok(())
             },
-            None => self.unexpected(),
+            None => {
+                match (
+                    &self.token,
+                    self.unmatched_angle_bracket_count,
+                    self.max_angle_bracket_count > 1,
+                ) {
+                    // (token::OpenDelim(_), 1, true) | (token::Semi, 1, true) => {
+                    //     self.struct_span_err(
+                    //         self.span,
+                    //         &format!("expected `>`, found `{}`", self.this_token_to_string()),
+                    //     // ).span_suggestion_short_with_applicability(
+                    //     ).emit();
+                    //     Ok(())
+                    // }
+                    _ => self.unexpected(),
+                }
+            }
         }
     }
 
@@ -1127,19 +1213,22 @@ impl<'a> Parser<'a> {
                                   -> PResult<'a, Vec<T>> where
         F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     {
-        let val = self.parse_seq_to_before_end(ket, sep, f)?;
-        self.bump();
+        let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+        if !recovered {
+            self.bump();
+        }
         Ok(val)
     }
 
     /// Parse a sequence, not including the closing delimiter. The function
     /// f must consume tokens until reaching the next separator or
     /// closing bracket.
-    pub fn parse_seq_to_before_end<T, F>(&mut self,
-                                         ket: &token::Token,
-                                         sep: SeqSep,
-                                         f: F)
-                                         -> PResult<'a, Vec<T>>
+    pub fn parse_seq_to_before_end<T, F>(
+        &mut self,
+        ket: &token::Token,
+        sep: SeqSep,
+        f: F,
+    ) -> PResult<'a, (Vec<T>, bool)>
         where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
     {
         self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
@@ -1151,10 +1240,11 @@ impl<'a> Parser<'a> {
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: F,
-    ) -> PResult<'a, Vec<T>>
+    ) -> PResult<'a, (Vec<T>, bool /* recovered */)>
         where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
     {
-        let mut first: bool = true;
+        let mut first = true;
+        let mut recovered = false;
         let mut v = vec![];
         while !kets.iter().any(|k| {
                 match expect {
@@ -1170,23 +1260,30 @@ impl<'a> Parser<'a> {
             if first {
                 first = false;
             } else {
-                if let Err(mut e) = self.expect(t) {
-                    // Attempt to keep parsing if it was a similar separator
-                    if let Some(ref tokens) = t.similar_tokens() {
-                        if tokens.contains(&self.token) {
-                            self.bump();
-                        }
+                match self.expect(t) {
+                    Ok(false) => {}
+                    Ok(true) => {
+                        recovered = true;
+                        break;
                     }
-                    e.emit();
-                    // Attempt to keep parsing if it was an omitted separator
-                    match f(self) {
-                        Ok(t) => {
-                            v.push(t);
-                            continue;
-                        },
-                        Err(mut e) => {
-                            e.cancel();
-                            break;
+                    Err(mut e) => {
+                        // Attempt to keep parsing if it was a similar separator
+                        if let Some(ref tokens) = t.similar_tokens() {
+                            if tokens.contains(&self.token) {
+                                self.bump();
+                            }
+                        }
+                        e.emit();
+                        // Attempt to keep parsing if it was an omitted separator
+                        match f(self) {
+                            Ok(t) => {
+                                v.push(t);
+                                continue;
+                            },
+                            Err(mut e) => {
+                                e.cancel();
+                                break;
+                            }
                         }
                     }
                 }
@@ -1205,23 +1302,26 @@ impl<'a> Parser<'a> {
             v.push(t);
         }
 
-        Ok(v)
+        Ok((v, recovered))
     }
 
     /// Parse a sequence, including the closing delimiter. The function
     /// f must consume tokens until reaching the next separator or
     /// closing bracket.
-    fn parse_unspanned_seq<T, F>(&mut self,
-                                 bra: &token::Token,
-                                 ket: &token::Token,
-                                 sep: SeqSep,
-                                 f: F)
-                                 -> PResult<'a, Vec<T>> where
+    fn parse_unspanned_seq<T, F>(
+        &mut self,
+        bra: &token::Token,
+        ket: &token::Token,
+        sep: SeqSep,
+        f: F,
+    ) -> PResult<'a, Vec<T>> where
         F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     {
         self.expect(bra)?;
-        let result = self.parse_seq_to_before_end(ket, sep, f)?;
-        self.eat(ket);
+        let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+        if !recovered {
+            self.eat(ket);
+        }
         Ok(result)
     }
 
@@ -2273,7 +2373,10 @@ impl<'a> Parser<'a> {
         // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
         // it isn't, then we reset the unmatched angle bracket count as we're about to start
         // parsing a new path.
-        if style == PathStyle::Expr { self.unmatched_angle_bracket_count = 0; }
+        if style == PathStyle::Expr {
+            self.unmatched_angle_bracket_count = 0;
+            self.max_angle_bracket_count = 0;
+        }
 
         let args = if self.eat_lt() {
             // `<'a, T, A = U>`
@@ -2285,12 +2388,14 @@ impl<'a> Parser<'a> {
         } else {
             // `(T, U) -> R`
             self.bump(); // `(`
-            let inputs = self.parse_seq_to_before_tokens(
+            let (inputs, recovered) = self.parse_seq_to_before_tokens(
                 &[&token::CloseDelim(token::Paren)],
                 SeqSep::trailing_allowed(token::Comma),
                 TokenExpectType::Expect,
                 |p| p.parse_ty())?;
-            self.bump(); // `)`
+            if !recovered {
+                self.bump(); // `)`
+            }
             let span = lo.to(self.prev_span);
             let output = if self.eat(&token::RArrow) {
                 Some(self.parse_ty_common(false, false)?)
@@ -2496,9 +2601,13 @@ impl<'a> Parser<'a> {
                 // (e,) is a tuple with only one field, e
                 let mut es = vec![];
                 let mut trailing_comma = false;
+                let mut recovered = false;
                 while self.token != token::CloseDelim(token::Paren) {
                     es.push(self.parse_expr()?);
-                    self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
+                    recovered = self.expect_one_of(
+                        &[],
+                        &[token::Comma, token::CloseDelim(token::Paren)],
+                    )?;
                     if self.eat(&token::Comma) {
                         trailing_comma = true;
                     } else {
@@ -2506,7 +2615,9 @@ impl<'a> Parser<'a> {
                         break;
                     }
                 }
-                self.bump();
+                if !recovered {
+                    self.bump();
+                }
                 hi = self.prev_span;
 
                 ex = if es.len() == 1 && !trailing_comma {
@@ -2802,7 +2913,7 @@ impl<'a> Parser<'a> {
 
             match self.expect_one_of(&[token::Comma],
                                      &[token::CloseDelim(token::Brace)]) {
-                Ok(()) => if let Some(f) = parsed_field.or(recovery_field) {
+                Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
                     // only include the field if there's no parse error for the field name
                     fields.push(f);
                 }
@@ -6011,7 +6122,7 @@ impl<'a> Parser<'a> {
 
         let sp = self.span;
         let mut variadic = false;
-        let args: Vec<Option<Arg>> =
+        let (args, recovered): (Vec<Option<Arg>>, bool) =
            self.parse_seq_to_before_end(
                 &token::CloseDelim(token::Paren),
                 SeqSep::trailing_allowed(token::Comma),
@@ -6059,7 +6170,9 @@ impl<'a> Parser<'a> {
                 }
             )?;
 
-        self.eat(&token::CloseDelim(token::Paren));
+        if !recovered {
+            self.eat(&token::CloseDelim(token::Paren));
+        }
 
         let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
 
@@ -6204,15 +6317,15 @@ impl<'a> Parser<'a> {
 
         // Parse the rest of the function parameter list.
         let sep = SeqSep::trailing_allowed(token::Comma);
-        let fn_inputs = if let Some(self_arg) = self_arg {
+        let (fn_inputs, recovered) = if let Some(self_arg) = self_arg {
             if self.check(&token::CloseDelim(token::Paren)) {
-                vec![self_arg]
+                (vec![self_arg], false)
             } else if self.eat(&token::Comma) {
                 let mut fn_inputs = vec![self_arg];
-                fn_inputs.append(&mut self.parse_seq_to_before_end(
-                    &token::CloseDelim(token::Paren), sep, parse_arg_fn)?
-                );
-                fn_inputs
+                let (mut input, recovered) = self.parse_seq_to_before_end(
+                    &token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
+                fn_inputs.append(&mut input);
+                (fn_inputs, recovered)
             } else {
                 return self.unexpected();
             }
@@ -6220,8 +6333,10 @@ impl<'a> Parser<'a> {
             self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
         };
 
-        // Parse closing paren and return type.
-        self.expect(&token::CloseDelim(token::Paren))?;
+        if !recovered {
+            // Parse closing paren and return type.
+            self.expect(&token::CloseDelim(token::Paren))?;
+        }
         Ok(P(FnDecl {
             inputs: fn_inputs,
             output: self.parse_ret_ty(true)?,
@@ -6241,7 +6356,7 @@ impl<'a> Parser<'a> {
                 SeqSep::trailing_allowed(token::Comma),
                 TokenExpectType::NoExpect,
                 |p| p.parse_fn_block_arg()
-            )?;
+            )?.0;
             self.expect_or()?;
             args
         }
@@ -8238,7 +8353,7 @@ impl<'a> Parser<'a> {
         // eat a matched-delimiter token tree:
         let (delim, tts) = self.expect_delimited_token_tree()?;
         if delim != MacDelimiter::Brace {
-            self.expect(&token::Semi)?
+            self.expect(&token::Semi)?;
         }
 
         Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
@@ -8383,11 +8498,27 @@ impl<'a> Parser<'a> {
     /// entry point for the parser.
     pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
         let lo = self.span;
-        Ok(ast::Crate {
+        let krate = Ok(ast::Crate {
             attrs: self.parse_inner_attributes()?,
             module: self.parse_mod_items(&token::Eof, lo)?,
             span: lo.to(self.span),
-        })
+        });
+        for unmatched in &self.unclosed_delims {
+            let mut err = self.struct_span_err(unmatched.found_span, &format!(
+                "incorrect close delimiter: `{}`",
+                pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+            ));
+            err.span_label(unmatched.found_span, "incorrect close delimiter");
+            if let Some(sp) = unmatched.candidate_span {
+                err.span_label(sp, "close delimiter possibly meant for this");
+            }
+            if let Some(sp) = unmatched.unclosed_span {
+                err.span_label(sp, "un-closed delimiter");
+            }
+            err.emit();
+        }
+        self.unclosed_delims.clear();
+        krate
     }
 
     pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 3b1fa5ea01f..a0f3113a1cb 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -487,8 +487,8 @@ impl Token {
     /// Enables better error recovery when the wrong token is found.
     crate fn similar_tokens(&self) -> Option<Vec<Token>> {
         match *self {
-            Comma => Some(vec![Dot, Lt]),
-            Semi => Some(vec![Colon]),
+            Comma => Some(vec![Dot, Lt, Semi]),
+            Semi => Some(vec![Colon, Comma]),
             _ => None
         }
     }
@@ -545,7 +545,15 @@ impl Token {
             // FIXME(#43081): Avoid this pretty-print + reparse hack
             let source = pprust::token_to_string(self);
             let filename = FileName::macro_expansion_source_code(&source);
-            parse_stream_from_source_str(filename, source, sess, Some(span))
+            let (tokens, errors) = parse_stream_from_source_str(
+                filename, source, sess, Some(span));
+            for err in errors {
+                sess.span_diagnostic.struct_span_err(
+                    err.found_span,
+                    "unclosed delimiter for_real",
+                ).emit();
+            }
+            tokens
         });
 
         // During early phases of the compiler the AST could get modified
@@ -786,12 +794,18 @@ fn prepend_attrs(sess: &ParseSess,
         let source = pprust::attr_to_string(attr);
         let macro_filename = FileName::macro_expansion_source_code(&source);
         if attr.is_sugared_doc {
-            let stream = parse_stream_from_source_str(
+            let (stream, errors) = parse_stream_from_source_str(
                 macro_filename,
                 source,
                 sess,
                 Some(span),
             );
+            for err in errors {
+                sess.span_diagnostic.struct_span_err(
+                    err.found_span,
+                    "unclosed delimiter attrs",
+                ).emit();
+            }
             builder.push(stream);
             continue
         }
@@ -808,12 +822,18 @@ fn prepend_attrs(sess: &ParseSess,
         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
         } else {
-            let stream = parse_stream_from_source_str(
+            let (stream, errors) = parse_stream_from_source_str(
                 macro_filename,
                 source,
                 sess,
                 Some(span),
            );
+            for err in errors {
+                sess.span_diagnostic.struct_span_err(
+                    err.found_span,
+                    "unclosed delimiter attrs 2",
+                ).emit();
+            }
             brackets.push(stream);
         }
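With this patch, `parse_stream_from_source_str`, `source_file_to_stream`, and `maybe_file_to_stream` return the buffered `UnmatchedBrace` records alongside the `TokenStream`, so callers have to drain and report them. Below is a minimal sketch of that pattern; it assumes the post-patch `libsyntax` internals shown in the diff above, and the helper name itself is made up for illustration:

```rust
use crate::parse::{self, token, ParseSess};
use crate::print::pprust::token_to_string;
use crate::tokenstream::TokenStream;
use syntax_pos::FileName;

// Hypothetical helper: parse a string to a token stream and report any
// mismatched delimiters the lexer buffered instead of emitting eagerly,
// mirroring what `maybe_file_to_stream` and `parse_crate_mod` do above.
fn parse_and_report_unclosed(sess: &ParseSess, name: FileName, source: String) -> TokenStream {
    let (stream, unclosed) = parse::parse_stream_from_source_str(name, source, sess, None);
    for unmatched in unclosed {
        let mut err = sess.span_diagnostic.struct_span_err(
            unmatched.found_span,
            &format!(
                "incorrect close delimiter: `{}`",
                token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
            ),
        );
        err.span_label(unmatched.found_span, "incorrect close delimiter");
        if let Some(sp) = unmatched.unclosed_span {
            err.span_label(sp, "un-closed delimiter");
        }
        err.emit();
    }
    stream
}
```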
