diff options
| author | Esteban Küber <esteban@kuber.com.ar> | 2019-05-21 17:47:23 -0700 |
|---|---|---|
| committer | Esteban Küber <esteban@kuber.com.ar> | 2019-05-24 11:49:33 -0700 |
| commit | 24160171e48a277ef71e84e14fbffffe3c81438a (patch) | |
| tree | e4d594acb224da13101b0146d9e785910021f412 /src/libsyntax | |
| parent | fc45382c125d940822368e866588568d78551946 (diff) | |
| download | rust-24160171e48a277ef71e84e14fbffffe3c81438a.tar.gz rust-24160171e48a277ef71e84e14fbffffe3c81438a.zip | |
Tweak macro parse errors when reaching EOF during macro call parse
- Add detail on origin of current parser when reaching EOF, stop saying "found <eof>", and point at the end of macro calls
- Handle empty `cfg_attr` attribute
- Reword empty `derive` attribute error
Diffstat (limited to 'src/libsyntax')
| -rw-r--r-- | src/libsyntax/attr/mod.rs | 9 | ||||
| -rw-r--r-- | src/libsyntax/config.rs | 13 | ||||
| -rw-r--r-- | src/libsyntax/ext/base.rs | 6 | ||||
| -rw-r--r-- | src/libsyntax/ext/derive.rs | 7 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 9 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 12 | ||||
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 91 |
8 files changed, 105 insertions(+), 44 deletions(-)
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 2f75a8c9db5..48948e4d0d7 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -278,7 +278,14 @@ impl Attribute { pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { - let mut parser = Parser::new(sess, self.tokens.clone(), None, false, false); + let mut parser = Parser::new( + sess, + self.tokens.clone(), + None, + false, + false, + Some("attribute"), + ); let result = f(&mut parser)?; if parser.token != token::Eof { parser.unexpected()?; diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index c82936afa3d..ca047dc66cb 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -94,6 +94,17 @@ impl<'a> StripUnconfigured<'a> { if !attr.check_name(sym::cfg_attr) { return vec![attr]; } + if attr.tokens.len() == 0 { + self.sess.span_diagnostic.struct_span_err(attr.span, "bad `cfg_attr` attribute") + .span_label(attr.span, "missing condition and attribute") + .note("`cfg_attr` must be of the form: \ + `#[cfg_attr(condition, attribute)]`") + .note("for more information, visit \ + <https://doc.rust-lang.org/reference/conditional-compilation.html\ + #the-cfg_attr-attribute>") + .emit(); + return vec![]; + } let (cfg_predicate, expanded_attrs) = match attr.parse(self.sess, |parser| { parser.expect(&token::OpenDelim(token::Paren))?; @@ -117,7 +128,7 @@ impl<'a> StripUnconfigured<'a> { Ok(result) => result, Err(mut e) => { e.emit(); - return Vec::new(); + return vec![]; } }; diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index f1a20d54065..ef7317e0038 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -850,7 +850,11 @@ impl<'a> ExtCtxt<'a> { } pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> { - parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect()) + parse::stream_to_parser( + 
self.parse_sess, + tts.iter().cloned().collect(), + Some("macro arguments"), + ) } pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() } pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 6e789c4c708..bbdda4932f1 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -17,8 +17,11 @@ pub fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) -> return true; } if !attr.is_meta_item_list() { - cx.span_err(attr.span, - "attribute must be of the form `#[derive(Trait1, Trait2, ...)]`"); + cx.struct_span_err(attr.span, "bad `derive` attribute") + .span_label(attr.span, "missing traits to be derived") + .note("`derive` must be of the form: \ + `#[derive(Trait1, Trait2, ...)]`") + .emit(); return false; } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index fa1f85c0e7b..02e986c9e75 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -658,7 +658,14 @@ pub fn parse( recurse_into_modules: bool, ) -> NamedParseResult { // Create a parser that can be used for the "black box" parts. - let mut parser = Parser::new(sess, tts, directory, recurse_into_modules, true); + let mut parser = Parser::new( + sess, + tts, + directory, + recurse_into_modules, + true, + Some("macro arguments"), + ); // A queue of possible matcher positions. We initialize it with the matcher position in which // the "dot" is before the first token of the first token tree in `ms`. 
`inner_parse_loop` then diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 37c49112dca..2debd8f048b 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -172,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>, path: Cow::from(cx.current_expansion.module.directory.as_path()), ownership: cx.current_expansion.directory_ownership, }; - let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false); + let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false, None); p.root_module_name = cx.current_expansion.module.mod_path.last() .map(|id| id.as_str().to_string()); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 1073fc6f3ab..ece6137e881 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -236,7 +236,7 @@ fn maybe_source_file_to_parser( ) -> Result<Parser<'_>, Vec<Diagnostic>> { let end_pos = source_file.end_pos; let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?; - let mut parser = stream_to_parser(sess, stream); + let mut parser = stream_to_parser(sess, stream, None); parser.unclosed_delims = unclosed_delims; if parser.token == token::Eof && parser.span.is_dummy() { parser.span = Span::new(end_pos, end_pos, parser.span.ctxt()); @@ -248,7 +248,7 @@ fn maybe_source_file_to_parser( // must preserve old name for now, because quote! from the *existing* // compiler expands into it pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> { - stream_to_parser(sess, tts.into_iter().collect()) + stream_to_parser(sess, tts.into_iter().collect(), Some("macro arguments")) } @@ -328,8 +328,12 @@ pub fn maybe_file_to_stream( } /// Given stream and the `ParseSess`, produces a parser. 
-pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> { - Parser::new(sess, stream, None, true, false) +pub fn stream_to_parser<'a>( + sess: &'a ParseSess, + stream: TokenStream, + is_subparser: Option<&'static str>, +) -> Parser<'a> { + Parser::new(sess, stream, None, true, false, is_subparser) } /// Given stream, the `ParseSess` and the base directory, produces a parser. diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 11c566b65e5..38aa5091f98 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -51,7 +51,7 @@ use crate::symbol::{kw, sym, Symbol}; use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError}; use rustc_target::spec::abi::{self, Abi}; use syntax_pos::{ - Span, MultiSpan, BytePos, FileName, + BytePos, DUMMY_SP, FileName, MultiSpan, Span, hygiene::CompilerDesugaringKind, }; use log::{debug, trace}; @@ -233,6 +233,8 @@ pub struct Parser<'a> { /// error. crate unclosed_delims: Vec<UnmatchedBrace>, last_unexpected_token_span: Option<Span>, + /// If `true`, this `Parser` is not parsing Rust code but rather a macro call. 
+ is_subparser: Option<&'static str>, } impl<'a> Drop for Parser<'a> { @@ -309,7 +311,7 @@ impl TokenCursor { self.frame = frame; continue } else { - return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP } + return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP } }; match self.frame.last_token { @@ -533,17 +535,19 @@ enum TokenExpectType { } impl<'a> Parser<'a> { - pub fn new(sess: &'a ParseSess, - tokens: TokenStream, - directory: Option<Directory<'a>>, - recurse_into_file_modules: bool, - desugar_doc_comments: bool) - -> Self { + pub fn new( + sess: &'a ParseSess, + tokens: TokenStream, + directory: Option<Directory<'a>>, + recurse_into_file_modules: bool, + desugar_doc_comments: bool, + is_subparser: Option<&'static str>, + ) -> Self { let mut parser = Parser { sess, token: token::Whitespace, - span: syntax_pos::DUMMY_SP, - prev_span: syntax_pos::DUMMY_SP, + span: DUMMY_SP, + prev_span: DUMMY_SP, meta_var_span: None, prev_token_kind: PrevTokenKind::Other, restrictions: Restrictions::empty(), @@ -568,6 +572,7 @@ impl<'a> Parser<'a> { max_angle_bracket_count: 0, unclosed_delims: Vec::new(), last_unexpected_token_span: None, + is_subparser, }; let tok = parser.next_tok(); @@ -639,16 +644,28 @@ impl<'a> Parser<'a> { } else { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_descr(); - let mut err = self.fatal(&format!("expected `{}`, found {}", - token_str, - this_token_str)); - - let sp = if self.token == token::Token::Eof { - // EOF, don't want to point at the following char, but rather the last token - self.prev_span - } else { - self.sess.source_map().next_point(self.prev_span) + let (prev_sp, sp) = match (&self.token, self.is_subparser) { + // Point at the end of the macro call when reaching end of macro arguments. + (token::Token::Eof, Some(_)) => { + let sp = self.sess.source_map().next_point(self.span); + (sp, sp) + } + // We don't want to point at the following span after DUMMY_SP. 
+ // This happens when the parser finds an empty TokenStream. + _ if self.prev_span == DUMMY_SP => (self.span, self.span), + // EOF, don't want to point at the following char, but rather the last token. + (token::Token::Eof, None) => (self.prev_span, self.span), + _ => (self.sess.source_map().next_point(self.prev_span), self.span), }; + let msg = format!( + "expected `{}`, found {}", + token_str, + match (&self.token, self.is_subparser) { + (token::Token::Eof, Some(origin)) => format!("end of {}", origin), + _ => this_token_str, + }, + ); + let mut err = self.struct_span_err(sp, &msg); let label_exp = format!("expected `{}`", token_str); match self.recover_closing_delimiter(&[t.clone()], err) { Err(e) => err = e, @@ -657,15 +674,15 @@ impl<'a> Parser<'a> { } } let cm = self.sess.source_map(); - match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { + match (cm.lookup_line(prev_sp.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line == b.line => { // When the spans are in the same line, it means that the only content // between them is whitespace, point only at the found token. - err.span_label(self.span, label_exp); + err.span_label(sp, label_exp); } _ => { - err.span_label(sp, label_exp); - err.span_label(self.span, "unexpected token"); + err.span_label(prev_sp, label_exp); + err.span_label(sp, "unexpected token"); } } Err(err) @@ -812,7 +829,7 @@ impl<'a> Parser<'a> { // | expected one of 8 possible tokens here err.span_label(self.span, label_exp); } - _ if self.prev_span == syntax_pos::DUMMY_SP => { + _ if self.prev_span == DUMMY_SP => { // Account for macro context where the previous span might not be // available to avoid incorrect output (#54841). 
err.span_label(self.span, "unexpected token"); @@ -2041,7 +2058,7 @@ impl<'a> Parser<'a> { path = self.parse_path(PathStyle::Type)?; path_span = path_lo.to(self.prev_span); } else { - path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP }; + path = ast::Path { segments: Vec::new(), span: DUMMY_SP }; path_span = self.span.to(self.span); } @@ -2627,16 +2644,24 @@ impl<'a> Parser<'a> { } Err(mut err) => { self.cancel(&mut err); - let msg = format!("expected expression, found {}", - self.this_token_descr()); - let mut err = self.fatal(&msg); + let (span, msg) = match (&self.token, self.is_subparser) { + (&token::Token::Eof, Some(origin)) => { + let sp = self.sess.source_map().next_point(self.span); + (sp, format!( "expected expression, found end of {}", origin)) + } + _ => (self.span, format!( + "expected expression, found {}", + self.this_token_descr(), + )), + }; + let mut err = self.struct_span_err(span, &msg); let sp = self.sess.source_map().start_point(self.span); if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow() .get(&sp) { self.sess.expr_parentheses_needed(&mut err, *sp, None); } - err.span_label(self.span, "expected expression"); + err.span_label(span, "expected expression"); return Err(err); } } @@ -5592,7 +5617,7 @@ impl<'a> Parser<'a> { where_clause: WhereClause { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), - span: syntax_pos::DUMMY_SP, + span: DUMMY_SP, }, span: span_lo.to(self.prev_span), }) @@ -5838,7 +5863,7 @@ impl<'a> Parser<'a> { let mut where_clause = WhereClause { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), - span: syntax_pos::DUMMY_SP, + span: DUMMY_SP, }; if !self.eat_keyword(kw::Where) { @@ -7005,7 +7030,7 @@ impl<'a> Parser<'a> { Ident::with_empty_ctxt(sym::warn_directory_ownership)), tokens: TokenStream::empty(), is_sugared_doc: false, - span: syntax_pos::DUMMY_SP, + span: DUMMY_SP, }; attr::mark_known(&attr); attrs.push(attr); @@ -7013,7 +7038,7 @@ impl<'a> Parser<'a> { Ok((id, ItemKind::Mod(module), 
Some(attrs))) } else { let placeholder = ast::Mod { - inner: syntax_pos::DUMMY_SP, + inner: DUMMY_SP, items: Vec::new(), inline: false }; |
