Diffstat (limited to 'src/libsyntax/parse')
| -rw-r--r-- | src/libsyntax/parse/lexer/tokentrees.rs | 21 |
| -rw-r--r-- | src/libsyntax/parse/literal.rs          |  8 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs           |  4 |
| -rw-r--r-- | src/libsyntax/parse/parser/attr.rs      |  9 |
| -rw-r--r-- | src/libsyntax/parse/parser/expr.rs      |  2 |
| -rw-r--r-- | src/libsyntax/parse/token.rs            |  2 |
6 files changed, 31 insertions, 15 deletions
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index e5ba7e45309..b4dd23c9f9b 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,3 +1,4 @@
+use rustc_data_structures::fx::FxHashMap;
 use syntax_pos::Span;
 
 use crate::print::pprust::token_to_string;
@@ -16,6 +17,7 @@ impl<'a> StringReader<'a> {
             unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
             last_unclosed_found_span: None,
+            last_delim_empty_block_spans: FxHashMap::default()
         };
         let res = tt_reader.parse_all_token_trees();
         (res, tt_reader.unmatched_braces)
@@ -34,6 +36,7 @@ struct TokenTreesReader<'a> {
     /// Used only for error recovery when arriving to EOF with mismatched braces.
     matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
     last_unclosed_found_span: Option<Span>,
+    last_delim_empty_block_spans: FxHashMap<token::DelimToken, Span>
 }
 
 impl<'a> TokenTreesReader<'a> {
@@ -121,13 +124,20 @@ impl<'a> TokenTreesReader<'a> {
                     // Correct delimiter.
                     token::CloseDelim(d) if d == delim => {
                         let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
+                        let close_brace_span = self.token.span;
+
+                        if tts.is_empty() {
+                            let empty_block_span = open_brace_span.to(close_brace_span);
+                            self.last_delim_empty_block_spans.insert(delim, empty_block_span);
+                        }
+
                         if self.open_braces.len() == 0 {
                             // Clear up these spans to avoid suggesting them as we've found
                             // properly matched delimiters so far for an entire block.
                             self.matching_delim_spans.clear();
                         } else {
                             self.matching_delim_spans.push(
-                                (open_brace, open_brace_span, self.token.span),
+                                (open_brace, open_brace_span, close_brace_span),
                             );
                         }
                         // Parse the close delimiter.
@@ -193,13 +203,20 @@ impl<'a> TokenTreesReader<'a> {
                     tts.into()
                 ).into())
             },
-            token::CloseDelim(_) => {
+            token::CloseDelim(delim) => {
                 // An unexpected closing delimiter (i.e., there is no
                 // matching opening delimiter).
                 let token_str = token_to_string(&self.token);
                 let msg = format!("unexpected close delimiter: `{}`", token_str);
                 let mut err = self.string_reader.sess.span_diagnostic
                     .struct_span_err(self.token.span, &msg);
+
+                if let Some(span) = self.last_delim_empty_block_spans.remove(&delim) {
+                    err.span_label(
+                        span,
+                        "this block is empty, you might have not meant to close it"
+                    );
+                }
                 err.span_label(self.token.span, "unexpected close delimiter");
                 Err(err)
             },
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index 14e1696610a..7952e293a53 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -3,7 +3,7 @@
 use crate::ast::{self, Lit, LitKind};
 use crate::parse::token::{self, Token};
 use crate::symbol::{kw, sym, Symbol};
-use crate::tokenstream::{TokenStream, TokenTree};
+use crate::tokenstream::TokenTree;
 
 use log::debug;
 use rustc_data_structures::sync::Lrc;
@@ -216,13 +216,13 @@ impl Lit {
         Lit { token: kind.to_lit_token(), kind, span }
     }
 
-    /// Losslessly convert an AST literal into a token stream.
-    crate fn tokens(&self) -> TokenStream {
+    /// Losslessly convert an AST literal into a token tree.
+    crate fn token_tree(&self) -> TokenTree {
         let token = match self.token.kind {
             token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
-        TokenTree::token(token, self.span).into()
+        TokenTree::token(token, self.span)
     }
 }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 9cb410a8ae3..6bbd8be0cb9 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -285,10 +285,10 @@ impl TokenCursor {
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                 [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
-                    .iter().cloned().collect::<TokenStream>().into()
+                    .iter().cloned().collect::<TokenStream>()
             } else {
                 [TokenTree::token(token::Pound, sp), body]
-                    .iter().cloned().collect::<TokenStream>().into()
+                    .iter().cloned().collect::<TokenStream>()
             },
         )));
 
diff --git a/src/libsyntax/parse/parser/attr.rs b/src/libsyntax/parse/parser/attr.rs
index 6f7d1ead4c1..188a144cac9 100644
--- a/src/libsyntax/parse/parser/attr.rs
+++ b/src/libsyntax/parse/parser/attr.rs
@@ -6,7 +6,6 @@
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::source_map::Span;
 use log::debug;
-use smallvec::smallvec;
 
 #[derive(Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -193,15 +192,15 @@ impl<'a> Parser<'a> {
                     is_interpolated_expr = true;
                 }
             }
-            let tokens = if is_interpolated_expr {
+            let token_tree = if is_interpolated_expr {
                 // We need to accept arbitrary interpolated expressions to continue
                 // supporting things like `doc = $expr` that work on stable.
                 // Non-literal interpolated expressions are rejected after expansion.
-                self.parse_token_tree().into()
+                self.parse_token_tree()
             } else {
-                self.parse_unsuffixed_lit()?.tokens()
+                self.parse_unsuffixed_lit()?.token_tree()
             };
-            TokenStream::from_streams(smallvec![eq.into(), tokens])
+            TokenStream::new(vec![eq.into(), token_tree.into()])
         } else {
             TokenStream::default()
         };
diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs
index 273f5a5ffa3..67a530ec683 100644
--- a/src/libsyntax/parse/parser/expr.rs
+++ b/src/libsyntax/parse/parser/expr.rs
@@ -423,7 +423,7 @@ impl<'a> Parser<'a> {
                self.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator")
                    .span_suggestion_short(
                        span_of_tilde,
-                        "use `!` to perform bitwise negation",
+                        "use `!` to perform bitwise not",
                        "!".to_owned(),
                        Applicability::MachineApplicable
                    )
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index e527989fb0b..4a8b25c6107 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -33,7 +33,7 @@ pub enum BinOpToken {
 }
 
 /// A delimiter token.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum DelimToken {
     /// A round parenthesis (i.e., `(` or `)`).
     Paren,
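A note on the tokentrees.rs hunks above: when a delimited group closes with no tokens inside, the reader now records the span from the opening to the closing delimiter in last_delim_empty_block_spans, and replays it as an extra label the next time an unexpected closing delimiter of the same kind is reported. As a rough illustration (hypothetical input, not taken from this patch's tests), a file like

    fn main() {}
    }

should still error on the stray `}` with "unexpected close delimiter", and now additionally label the empty `{}` with "this block is empty, you might have not meant to close it".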
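The token.rs hunk derives Eq for DelimToken. Together with the existing Hash derive, that is what lets the new last_delim_empty_block_spans field use the delimiter as an FxHashMap key, since hash-map keys must implement Eq + Hash. A minimal standalone sketch of that requirement (plain std HashMap and made-up byte offsets standing in for Span; not compiler code):

    use std::collections::HashMap;

    // Mirrors the shape of token::DelimToken; deriving Eq (alongside Hash)
    // is what makes the enum usable as a hash-map key.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    enum DelimToken { Paren, Brace }

    fn main() {
        let mut empty_block_spans: HashMap<DelimToken, (usize, usize)> = HashMap::new();
        empty_block_spans.insert(DelimToken::Brace, (10, 12));
        empty_block_spans.insert(DelimToken::Paren, (20, 22));
        assert!(empty_block_spans.contains_key(&DelimToken::Brace));
        println!("{:?}", empty_block_spans);
    }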
