diff options
| author | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2019-06-05 00:02:59 +0300 |
|---|---|---|
| committer | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2019-06-06 14:03:15 +0300 |
| commit | c0c57acd7b8061697d196fd800a7ff3151c37f38 (patch) | |
| tree | 60a34ef9211243a932a90f0fec3bd5f4e82fcd1d /src/libsyntax | |
| parent | e0127dbf8135b766a332ce21c4eee48998b59bef (diff) | |
| download | rust-c0c57acd7b8061697d196fd800a7ff3151c37f38.tar.gz rust-c0c57acd7b8061697d196fd800a7ff3151c37f38.zip | |
syntax: Use `Token` in `StringReader` and `TokenTreesReader`
Diffstat (limited to 'src/libsyntax')
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 35 | ||||
| -rw-r--r-- | src/libsyntax/parse/lexer/tokentrees.rs | 43 |
2 files changed, 32 insertions(+), 46 deletions(-)
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 225db0164fe..9dba5ff3e8c 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -12,7 +12,6 @@ use core::unicode::property::Pattern_White_Space; use std::borrow::Cow; use std::char; use std::iter; -use std::mem::replace; use rustc_data_structures::sync::Lrc; use log::debug; @@ -41,8 +40,7 @@ pub struct StringReader<'a> { /// Stop reading src at this index. crate end_src_index: usize, // cached: - peek_tok: TokenKind, - peek_span: Span, + peek_token: Token, peek_span_src_raw: Span, fatal_errs: Vec<DiagnosticBuilder<'a>>, // cache a direct reference to the source text, so that we don't have to @@ -90,10 +88,7 @@ impl<'a> StringReader<'a> { /// Returns the next token. EFFECT: advances the string_reader. pub fn try_next_token(&mut self) -> Result<Token, ()> { assert!(self.fatal_errs.is_empty()); - let ret_val = Token { - kind: replace(&mut self.peek_tok, token::Whitespace), - span: self.peek_span, - }; + let ret_val = self.peek_token.clone(); self.advance_token()?; Ok(ret_val) } @@ -158,7 +153,7 @@ impl<'a> StringReader<'a> { } fn fatal(&self, m: &str) -> FatalError { - self.fatal_span(self.peek_span, m) + self.fatal_span(self.peek_token.span, m) } crate fn emit_fatal_errors(&mut self) { @@ -179,12 +174,8 @@ impl<'a> StringReader<'a> { buffer } - pub fn peek(&self) -> Token { - // FIXME(pcwalton): Bad copy! 
- Token { - kind: self.peek_tok.clone(), - span: self.peek_span, - } + pub fn peek(&self) -> &Token { + &self.peek_token } /// For comments.rs, which hackily pokes into next_pos and ch @@ -215,8 +206,7 @@ impl<'a> StringReader<'a> { source_file, end_src_index: src.len(), // dummy values; not read - peek_tok: token::Eof, - peek_span: syntax_pos::DUMMY_SP, + peek_token: Token { kind: token::Eof, span: syntax_pos::DUMMY_SP }, peek_span_src_raw: syntax_pos::DUMMY_SP, src, fatal_errs: Vec::new(), @@ -321,29 +311,28 @@ impl<'a> StringReader<'a> { self.err_span_(from_pos, to_pos, &m[..]); } - /// Advance peek_tok and peek_span to refer to the next token, and + /// Advance peek_token to refer to the next token, and /// possibly update the interner. fn advance_token(&mut self) -> Result<(), ()> { match self.scan_whitespace_or_comment() { Some(comment) => { self.peek_span_src_raw = comment.span; - self.peek_span = comment.span; - self.peek_tok = comment.kind; + self.peek_token = comment; } None => { if self.is_eof() { - self.peek_tok = token::Eof; + let (real, raw) = self.mk_sp_and_raw( self.source_file.end_pos, self.source_file.end_pos, ); - self.peek_span = real; + self.peek_token = Token { kind: token::Eof, span: real }; self.peek_span_src_raw = raw; } else { let start_bytepos = self.pos; - self.peek_tok = self.next_token_inner()?; + let kind = self.next_token_inner()?; let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos); - self.peek_span = real; + self.peek_token = Token { kind, span: real }; self.peek_span_src_raw = raw; }; } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index abff7177abd..0dab441c96f 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -2,15 +2,15 @@ use syntax_pos::Span; use crate::print::pprust::token_to_string; use crate::parse::lexer::{StringReader, UnmatchedBrace}; -use crate::parse::{token, PResult}; +use crate::parse::token::{self, Token}; 
+use crate::parse::PResult; use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint}; impl<'a> StringReader<'a> { crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) { let mut tt_reader = TokenTreesReader { string_reader: self, - token: token::Eof, - span: syntax_pos::DUMMY_SP, + token: token::Token { kind: token::Eof, span: syntax_pos::DUMMY_SP }, open_braces: Vec::new(), unmatched_braces: Vec::new(), matching_delim_spans: Vec::new(), @@ -23,8 +23,7 @@ impl<'a> StringReader<'a> { struct TokenTreesReader<'a> { string_reader: StringReader<'a>, - token: token::TokenKind, - span: Span, + token: Token, /// Stack of open delimiters and their spans. Used for error message. open_braces: Vec<(token::DelimToken, Span)>, unmatched_braces: Vec<UnmatchedBrace>, @@ -52,7 +51,7 @@ impl<'a> TokenTreesReader<'a> { fn parse_token_trees_until_close_delim(&mut self) -> TokenStream { let mut tts = vec![]; loop { - if let token::CloseDelim(..) = self.token { + if let token::CloseDelim(..) = self.token.kind { return TokenStream::new(tts); } @@ -68,11 +67,11 @@ impl<'a> TokenTreesReader<'a> { fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> { let sm = self.string_reader.sess.source_map(); - match self.token { + match self.token.kind { token::Eof => { let msg = "this file contains an un-closed delimiter"; let mut err = self.string_reader.sess.span_diagnostic - .struct_span_err(self.span, msg); + .struct_span_err(self.token.span, msg); for &(_, sp) in &self.open_braces { err.span_label(sp, "un-closed delimiter"); } @@ -102,10 +101,10 @@ impl<'a> TokenTreesReader<'a> { }, token::OpenDelim(delim) => { // The span for beginning of the delimited section - let pre_span = self.span; + let pre_span = self.token.span; // Parse the open delimiter. - self.open_braces.push((delim, self.span)); + self.open_braces.push((delim, self.token.span)); self.real_token(); // Parse the token trees within the delimiters. 
@@ -114,9 +113,9 @@ impl<'a> TokenTreesReader<'a> { let tts = self.parse_token_trees_until_close_delim(); // Expand to cover the entire delimited token tree - let delim_span = DelimSpan::from_pair(pre_span, self.span); + let delim_span = DelimSpan::from_pair(pre_span, self.token.span); - match self.token { + match self.token.kind { // Correct delimiter. token::CloseDelim(d) if d == delim => { let (open_brace, open_brace_span) = self.open_braces.pop().unwrap(); @@ -126,7 +125,7 @@ impl<'a> TokenTreesReader<'a> { self.matching_delim_spans.clear(); } else { self.matching_delim_spans.push( - (open_brace, open_brace_span, self.span), + (open_brace, open_brace_span, self.token.span), ); } // Parse the close delimiter. @@ -136,16 +135,16 @@ impl<'a> TokenTreesReader<'a> { token::CloseDelim(other) => { let mut unclosed_delimiter = None; let mut candidate = None; - if self.last_unclosed_found_span != Some(self.span) { + if self.last_unclosed_found_span != Some(self.token.span) { // do not complain about the same unclosed delimiter multiple times - self.last_unclosed_found_span = Some(self.span); + self.last_unclosed_found_span = Some(self.token.span); // This is a conservative error: only report the last unclosed // delimiter. The previous unclosed delimiters could actually be // closed! The parser just hasn't gotten to them yet. 
if let Some(&(_, sp)) = self.open_braces.last() { unclosed_delimiter = Some(sp); }; - if let Some(current_padding) = sm.span_to_margin(self.span) { + if let Some(current_padding) = sm.span_to_margin(self.token.span) { for (brace, brace_span) in &self.open_braces { if let Some(padding) = sm.span_to_margin(*brace_span) { // high likelihood of these two corresponding @@ -159,7 +158,7 @@ impl<'a> TokenTreesReader<'a> { self.unmatched_braces.push(UnmatchedBrace { expected_delim: tok, found_delim: other, - found_span: self.span, + found_span: self.token.span, unclosed_span: unclosed_delimiter, candidate_span: candidate, }); @@ -198,12 +197,12 @@ impl<'a> TokenTreesReader<'a> { let token_str = token_to_string(&self.token); let msg = format!("unexpected close delimiter: `{}`", token_str); let mut err = self.string_reader.sess.span_diagnostic - .struct_span_err(self.span, &msg); - err.span_label(self.span, "unexpected close delimiter"); + .struct_span_err(self.token.span, &msg); + err.span_label(self.token.span, "unexpected close delimiter"); Err(err) }, _ => { - let tt = TokenTree::token(self.span, self.token.clone()); + let tt = TokenTree::Token(self.token.clone()); // Note that testing for joint-ness here is done via the raw // source span as the joint-ness is a property of the raw source // rather than wanting to take `override_span` into account. @@ -219,8 +218,6 @@ impl<'a> TokenTreesReader<'a> { } fn real_token(&mut self) { - let t = self.string_reader.real_token(); - self.token = t.kind; - self.span = t.span; + self.token = self.string_reader.real_token(); } } |
