| author | Jeffrey Seyfried <jeffrey.seyfried@gmail.com> | 2017-03-28 05:32:43 +0000 |
|---|---|---|
| committer | Jeffrey Seyfried <jeffrey.seyfried@gmail.com> | 2017-06-26 02:05:45 +0000 |
| commit | d4488b7df97e62bfeed8c30b1922ce55ff254594 | |
| tree | 3e65f4bf53f191bba6ec937843a8a73e019686d6 | /src/libsyntax/parse |
| parent | fc9ccfdbe02f4cf3e3ea60ee4412f00d29ef7f53 | |
Simplify `hygiene::Mark` application, and remove the variant `Token::SubstNt` in favor of `quoted::TokenTree::MetaVar`.
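For context, the sketch below illustrates the shape of this change: the metavariable case moves out of the core `Token` enum and into the `quoted` token-tree type used for `macro_rules!` definitions. This is a simplified sketch with stub types and elided variants, not the exact libsyntax definitions.

```rust
// Simplified sketch of the change; stub types stand in for the real
// libsyntax `Span` and `Ident`, and unrelated variants are elided.
#[derive(Clone, Copy)]
pub struct Span;
#[derive(Clone, Copy)]
pub struct Ident;

// Before: the core token enum carried macro metavariables directly.
pub enum OldToken {
    /// A syntactic variable in a macro RHS, e.g. `$e` (removed by this commit).
    SubstNt(Ident),
    // ... all other token kinds ...
}

// After: metavariables exist only in the `quoted` token trees that
// represent `macro_rules!` matchers and right-hand sides.
pub mod quoted {
    use super::{Ident, Span};

    pub enum TokenTree {
        /// A metavariable use such as `$e` in a right-hand side.
        MetaVar(Span, Ident),
        // ... delimited groups, repetitions, plain tokens ...
    }
}
```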
Diffstat (limited to 'src/libsyntax/parse')
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 42 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 13 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 5 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 3 |
4 files changed, 34 insertions, 29 deletions
```diff
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index e2656bea483..afc1e583d69 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -66,14 +66,15 @@ pub struct StringReader<'a> {
     token: token::Token,
     span: Span,
     open_braces: Vec<(token::DelimToken, Span)>,
-}
-
-fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
-    Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }
+    pub override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
-    fn next_token(&mut self) -> TokenAndSpan {
+    fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
+        unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION})
+    }
+
+    fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
         let res = self.try_next_token();
         self.unwrap_or_abort(res)
     }
@@ -175,6 +176,7 @@ impl<'a> StringReader<'a> {
             token: token::Eof,
             span: syntax_pos::DUMMY_SP,
             open_braces: Vec::new(),
+            override_span: None,
         }
     }
 
@@ -229,12 +231,12 @@ impl<'a> StringReader<'a> {
 
     /// Report a fatal error spanning [`from_pos`, `to_pos`).
     fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError {
-        self.fatal_span(mk_sp(from_pos, to_pos), m)
+        self.fatal_span(self.mk_sp(from_pos, to_pos), m)
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`).
     fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
-        self.err_span(mk_sp(from_pos, to_pos), m)
+        self.err_span(self.mk_sp(from_pos, to_pos), m)
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -258,7 +260,7 @@ impl<'a> StringReader<'a> {
         for c in c.escape_default() {
             m.push(c)
         }
-        self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..])
+        self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..])
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -282,7 +284,7 @@ impl<'a> StringReader<'a> {
         for c in c.escape_default() {
             m.push(c)
         }
-        self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..])
+        self.sess.span_diagnostic.struct_span_err(self.mk_sp(from_pos, to_pos), &m[..])
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -306,11 +308,11 @@ impl<'a> StringReader<'a> {
             None => {
                 if self.is_eof() {
                     self.peek_tok = token::Eof;
-                    self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos);
+                    self.peek_span = self.mk_sp(self.filemap.end_pos, self.filemap.end_pos);
                 } else {
                     let start_bytepos = self.pos;
                     self.peek_tok = self.next_token_inner()?;
-                    self.peek_span = mk_sp(start_bytepos, self.pos);
+                    self.peek_span = self.mk_sp(start_bytepos, self.pos);
                 };
             }
         }
@@ -502,7 +504,7 @@ impl<'a> StringReader<'a> {
         if let Some(c) = self.ch {
             if c.is_whitespace() {
                 let msg = "called consume_any_line_comment, but there was whitespace";
-                self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg);
+                self.sess.span_diagnostic.span_err(self.mk_sp(self.pos, self.pos), msg);
             }
         }
 
@@ -545,13 +547,13 @@ impl<'a> StringReader<'a> {
 
                 Some(TokenAndSpan {
                     tok: tok,
-                    sp: mk_sp(start_bpos, self.pos),
+                    sp: self.mk_sp(start_bpos, self.pos),
                 })
             })
         } else {
             Some(TokenAndSpan {
                 tok: token::Comment,
-                sp: mk_sp(start_bpos, self.pos),
+                sp: self.mk_sp(start_bpos, self.pos),
             })
         }
     }
@@ -584,7 +586,7 @@ impl<'a> StringReader<'a> {
                 }
                 return Some(TokenAndSpan {
                     tok: token::Shebang(self.name_from(start)),
-                    sp: mk_sp(start, self.pos),
+                    sp: self.mk_sp(start, self.pos),
                 });
             }
         }
@@ -612,7 +614,7 @@ impl<'a> StringReader<'a> {
                 }
                 let c = Some(TokenAndSpan {
                     tok: token::Whitespace,
-                    sp: mk_sp(start_bpos, self.pos),
+                    sp: self.mk_sp(start_bpos, self.pos),
                 });
                 debug!("scanning whitespace: {:?}", c);
                 c
@@ -674,7 +676,7 @@ impl<'a> StringReader<'a> {
 
             Some(TokenAndSpan {
                 tok: tok,
-                sp: mk_sp(start_bpos, self.pos),
+                sp: self.mk_sp(start_bpos, self.pos),
             })
         })
     }
@@ -869,7 +871,7 @@ impl<'a> StringReader<'a> {
                 let valid = if self.ch_is('{') {
                     self.scan_unicode_escape(delim) && !ascii_only
                 } else {
-                    let span = mk_sp(start, self.pos);
+                    let span = self.mk_sp(start, self.pos);
                     self.sess.span_diagnostic
                         .struct_span_err(span, "incorrect unicode escape sequence")
                         .span_help(span,
@@ -907,13 +909,13 @@ impl<'a> StringReader<'a> {
                                          },
                                          c);
                     if e == '\r' {
-                        err.span_help(mk_sp(escaped_pos, pos),
+                        err.span_help(self.mk_sp(escaped_pos, pos),
                                       "this is an isolated carriage return; consider \
                                        checking your editor and version control \
                                        settings");
                     }
                     if (e == '{' || e == '}') && !ascii_only {
-                        err.span_help(mk_sp(escaped_pos, pos),
+                        err.span_help(self.mk_sp(escaped_pos, pos),
                                       "if used in a formatting string, curly braces \
                                        are escaped with `{{` and `}}`");
                     }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 3a68a6ba764..f917eec2cd0 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -141,9 +141,10 @@ pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
-                                    -> TokenStream {
-    filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
+pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess,
+                                    override_span: Option<Span>)
+                                    -> TokenStream {
+    filemap_to_stream(sess, sess.codemap().new_filemap(name, source), override_span)
 }
 
 // Create a new parser from a source string
@@ -177,7 +178,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 /// Given a filemap and config, return a parser
 pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
     let end_pos = filemap.end_pos;
-    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
+    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None));
 
     if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
         parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION };
@@ -212,8 +213,10 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream {
+pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>, override_span: Option<Span>)
+                         -> TokenStream {
     let mut srdr = lexer::StringReader::new(sess, filemap);
+    srdr.override_span = override_span;
     srdr.real_token();
     panictry!(srdr.parse_all_token_trees())
 }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 851a638e148..25ab46f6f9e 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -2626,7 +2626,10 @@ impl<'a> Parser<'a> {
 
     pub fn process_potential_macro_variable(&mut self) {
         let ident = match self.token {
-            token::SubstNt(name) => {
+            token::Dollar if self.span.ctxt != syntax_pos::hygiene::SyntaxContext::empty() &&
+                             self.look_ahead(1, |t| t.is_ident()) => {
+                self.bump();
+                let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() };
                 self.fatal(&format!("unknown macro variable `{}`", name)).emit();
                 return
             }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 77db604c56e..f208b0f56f8 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -172,9 +172,6 @@ pub enum Token {
     // Can be expanded into several tokens.
     /// Doc comment
     DocComment(ast::Name),
-    // In right-hand-sides of MBE macros:
-    /// A syntactic variable that will be filled in by macro expansion.
-    SubstNt(ast::Ident),
 
     // Junk. These carry no data because we don't really care about the data
     // they *would* carry, and don't really want to allocate a new ident for
```
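The practical effect of the `override_span` plumbing above is that a caller re-lexing a string can stamp every resulting token with one caller-chosen span, instead of spans computed from byte positions in a fresh filemap. Below is a minimal usage sketch against the post-patch signatures; the surrounding crate setup and the in-scope `sess` and `call_site` values are assumed, not part of this commit.

```rust
// Minimal sketch, assuming the libsyntax of this era is linkable and a
// ParseSess plus a caller-side Span are already available.
use syntax::parse::{self, ParseSess};
use syntax::tokenstream::TokenStream;
use syntax_pos::Span;

fn tokens_with_caller_span(sess: &ParseSess, src: &str, call_site: Span) -> TokenStream {
    // Passing `Some(call_site)` makes StringReader::mk_sp ignore the
    // computed byte positions and return `call_site` for every token.
    parse::parse_stream_from_source_str("<sketch>".to_string(),
                                        src.to_string(),
                                        sess,
                                        Some(call_site))
}
```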
