| author | bors <bors@rust-lang.org> | 2019-06-07 06:52:09 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2019-06-07 06:52:09 +0000 |
| commit | ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4 (patch) | |
| tree | 07a0d2ef9340fa064341cc697a8ae58e3762373a /src/libsyntax/parse | |
| parent | c5295ac64a8f2c7aee9cdd13b8fe00b82aff8435 (diff) | |
| parent | 3a31f0634bb1669eae64e83f595942986f867125 (diff) | |
Auto merge of #61541 - petrochenkov:tsp, r=oli-obk
syntax: Keep token span as a part of `Token`

In a world with proc macros and edition hygiene, a `Token` without a span is not self-contained. In practice this means that tokens and spans are always stored and passed around together. This PR combines them into a single struct via the following renamings/replacements:

- `Token` -> `TokenKind`
- `TokenAndSpan` -> `Token`
- `(Token, Span)` -> `Token`

Some later commits (https://github.com/rust-lang/rust/commit/fb6e2fe8fd6caed247857758c6c3549fe2b59527 and https://github.com/rust-lang/rust/commit/1cdee86940db892cd17239c26add5364335e895a) remove the duplicate spans in `token::Ident` and `token::Lifetime`. Those spans were supposed to be identical to the token spans, but could easily go out of sync, as was noticed in https://github.com/rust-lang/rust/pull/60965#discussion_r285398523. The `(Token, Span)` -> `Token` change is a soft prerequisite for that de-duplication, since it avoids some larger churn (passing spans to most of the functions that classify identifiers).
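For orientation, here is a minimal, self-contained Rust sketch of the shape this PR moves to. The constructor and helper names (`Token::new`, `Token::dummy`, `Token::take`) follow the diff below, but `Span` and `Name` are toy stand-ins for the real libsyntax types and the `TokenKind` variants are abridged — treat this as an illustration, not the actual definitions:

```rust
// Toy stand-ins so the sketch compiles on its own; the real `Span` and
// `ast::Name` live in syntax_pos/libsyntax.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Span(u32, u32);
pub type Name = &'static str;

// Before this PR, the payload enum itself was called `Token`, and it traveled
// beside its span in a separate `TokenAndSpan { tok, sp }` struct.
// After: the payload enum is renamed to `TokenKind`...
#[derive(Clone, Debug, PartialEq)]
pub enum TokenKind {
    Ident(Name, /* is_raw */ bool), // no longer carries its own span
    Lifetime(Name),
    Eq,
    Whitespace,
    Eof,
}

// ...and `Token` becomes the self-contained kind + span pair.
#[derive(Clone, Debug, PartialEq)]
pub struct Token {
    pub kind: TokenKind,
    pub span: Span,
}

impl Token {
    pub fn new(kind: TokenKind, span: Span) -> Token {
        Token { kind, span }
    }

    // Placeholder token, mirroring the removed `Default for TokenAndSpan`
    // (`Whitespace` + dummy span); used to initialize `peek_token` etc.
    pub fn dummy() -> Token {
        Token::new(TokenKind::Whitespace, Span(0, 0))
    }

    // Hand the token out by value, leaving a dummy behind, as in
    // `self.peek_token.take()` in the lexer hunks below.
    pub fn take(&mut self) -> Token {
        std::mem::replace(self, Token::dummy())
    }
}

fn main() {
    let tok = Token::new(TokenKind::Ident("fn", false), Span(0, 2));
    assert_eq!(tok.kind, TokenKind::Ident("fn", false));
    assert_eq!(tok.span, Span(0, 2));
}
```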
Diffstat (limited to 'src/libsyntax/parse')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/attr.rs | 14 |
| -rw-r--r-- | src/libsyntax/parse/diagnostics.rs | 41 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 237 |
| -rw-r--r-- | src/libsyntax/parse/lexer/tokentrees.rs | 43 |
| -rw-r--r-- | src/libsyntax/parse/literal.rs | 43 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 65 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 297 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 199 |
8 files changed, 477 insertions, 462 deletions
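One device worth noting before the diff: the `parser.rs` hunk adds an `impl Deref for Parser` (flagged with a FIXME in the patch) so that the many existing `self.span`-style accesses keep compiling after the span moves into `self.token`. A tiny self-contained illustration of that pattern, with toy types standing in for the real ones:

```rust
use std::ops::Deref;

// Toy stand-ins; the real `Token` and `Parser` live in libsyntax.
struct Token { span: u32 }
struct Parser { token: Token }

// Same device as the diff's `impl Deref for Parser<'_>`: `Parser` derefs to
// its current token, so `self.span` resolves to `self.token.span`.
impl Deref for Parser {
    type Target = Token;
    fn deref(&self) -> &Token {
        &self.token
    }
}

fn main() {
    let p = Parser { token: Token { span: 7 } };
    // Field access auto-derefs: `p.span` goes through `Deref` to `p.token.span`.
    assert_eq!(p.span, 7);
}
```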
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index e99a86e807f..d83b76f4d23 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -24,7 +24,7 @@ impl<'a> Parser<'a> { let mut just_parsed_doc_comment = false; loop { debug!("parse_outer_attributes: self.token={:?}", self.token); - match self.token { + match self.token.kind { token::Pound => { let inner_error_reason = if just_parsed_doc_comment { "an inner attribute is not permitted following an outer doc comment" @@ -81,7 +81,7 @@ impl<'a> Parser<'a> { debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", inner_parse_policy, self.token); - let (span, path, tokens, style) = match self.token { + let (span, path, tokens, style) = match self.token.kind { token::Pound => { let lo = self.span; self.bump(); @@ -140,7 +140,7 @@ impl<'a> Parser<'a> { /// PATH `=` TOKEN_TREE /// The delimiters or `=` are still put into the resulting token stream. crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { - let meta = match self.token { + let meta = match self.token.kind { token::Interpolated(ref nt) => match **nt { Nonterminal::NtMeta(ref meta) => Some(meta.clone()), _ => None, @@ -157,9 +157,9 @@ impl<'a> Parser<'a> { self.check(&token::OpenDelim(DelimToken::Brace)) { self.parse_token_tree().into() } else if self.eat(&token::Eq) { - let eq = TokenTree::Token(self.prev_span, token::Eq); + let eq = TokenTree::token(token::Eq, self.prev_span); let mut is_interpolated_expr = false; - if let token::Interpolated(nt) = &self.token { + if let token::Interpolated(nt) = &self.token.kind { if let token::NtExpr(..) = **nt { is_interpolated_expr = true; } @@ -188,7 +188,7 @@ impl<'a> Parser<'a> { crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { let mut attrs: Vec<ast::Attribute> = vec![]; loop { - match self.token { + match self.token.kind { token::Pound => { // Don't even try to parse if it's not an inner attribute. if !self.look_ahead(1, |t| t == &token::Not) { @@ -236,7 +236,7 @@ impl<'a> Parser<'a> { /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ; /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? 
; pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { - let nt_meta = match self.token { + let nt_meta = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtMeta(ref e) => Some(e.clone()), _ => None, diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index 5df22f28797..7f0bf4a9050 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -2,8 +2,9 @@ use crate::ast::{ self, Arg, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind, Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData, }; -use crate::parse::{SeqSep, token, PResult, Parser}; +use crate::parse::{SeqSep, PResult, Parser}; use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType}; +use crate::parse::token::{self, TokenKind}; use crate::print::pprust; use crate::ptr::P; use crate::source_map::Spanned; @@ -201,12 +202,12 @@ impl<'a> Parser<'a> { self.span, &format!("expected identifier, found {}", self.this_token_descr()), ); - if let token::Ident(ident, false) = &self.token { - if ident.is_raw_guess() { + if let token::Ident(name, false) = self.token.kind { + if Ident::new(name, self.span).is_raw_guess() { err.span_suggestion( self.span, "you can escape reserved keywords to use them as identifiers", - format!("r#{}", ident), + format!("r#{}", name), Applicability::MaybeIncorrect, ); } @@ -229,8 +230,8 @@ impl<'a> Parser<'a> { pub fn expected_one_of_not_found( &mut self, - edible: &[token::Token], - inedible: &[token::Token], + edible: &[TokenKind], + inedible: &[TokenKind], ) -> PResult<'a, bool /* recovered */> { fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = tokens.iter(); @@ -294,7 +295,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - let sp = if self.token == token::Token::Eof { + let sp = if self.token == token::Eof { // This is EOF, don't want to point at the following char, but rather the last token self.prev_span } else { @@ -368,7 +369,7 @@ impl<'a> Parser<'a> { /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. - crate fn eat_to_tokens(&mut self, kets: &[&token::Token]) { + crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) { let handler = self.diagnostic(); if let Err(ref mut err) = self.parse_seq_to_before_tokens( @@ -388,7 +389,7 @@ impl<'a> Parser<'a> { /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>(); /// ^^ help: remove extra angle brackets /// ``` - crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) { + crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) { // This function is intended to be invoked after parsing a path segment where there are two // cases: // @@ -726,13 +727,13 @@ impl<'a> Parser<'a> { /// closing delimiter. pub fn unexpected_try_recover( &mut self, - t: &token::Token, + t: &TokenKind, ) -> PResult<'a, bool /* recovered */> { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_descr(); - let (prev_sp, sp) = match (&self.token, self.subparser_name) { + let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { // Point at the end of the macro call when reaching end of macro arguments. 
- (token::Token::Eof, Some(_)) => { + (token::Eof, Some(_)) => { let sp = self.sess.source_map().next_point(self.span); (sp, sp) } @@ -740,14 +741,14 @@ impl<'a> Parser<'a> { // This happens when the parser finds an empty TokenStream. _ if self.prev_span == DUMMY_SP => (self.span, self.span), // EOF, don't want to point at the following char, but rather the last token. - (token::Token::Eof, None) => (self.prev_span, self.span), + (token::Eof, None) => (self.prev_span, self.span), _ => (self.sess.source_map().next_point(self.prev_span), self.span), }; let msg = format!( "expected `{}`, found {}", token_str, - match (&self.token, self.subparser_name) { - (token::Token::Eof, Some(origin)) => format!("end of {}", origin), + match (&self.token.kind, self.subparser_name) { + (token::Eof, Some(origin)) => format!("end of {}", origin), _ => this_token_str, }, ); @@ -903,7 +904,7 @@ impl<'a> Parser<'a> { crate fn recover_closing_delimiter( &mut self, - tokens: &[token::Token], + tokens: &[TokenKind], mut err: DiagnosticBuilder<'a>, ) -> PResult<'a, bool> { let mut pos = None; @@ -989,7 +990,7 @@ impl<'a> Parser<'a> { break_on_semi, break_on_block); loop { debug!("recover_stmt_ loop {:?}", self.token); - match self.token { + match self.token.kind { token::OpenDelim(token::DelimToken::Brace) => { brace_depth += 1; self.bump(); @@ -1074,7 +1075,7 @@ impl<'a> Parser<'a> { } crate fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { - if let token::DocComment(_) = self.token { + if let token::DocComment(_) = self.token.kind { let mut err = self.diagnostic().struct_span_err( self.span, &format!("documentation comments cannot be applied to {}", applied_to), @@ -1214,8 +1215,8 @@ impl<'a> Parser<'a> { } crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { - let (span, msg) = match (&self.token, self.subparser_name) { - (&token::Token::Eof, Some(origin)) => { + let (span, msg) = match (&self.token.kind, self.subparser_name) { + (&token::Eof, Some(origin)) => { let sp = self.sess.source_map().next_point(self.span); (sp, format!("expected expression, found end of {}", origin)) } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index a06a84f162a..e3d959c2c54 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1,6 +1,6 @@ -use crate::ast::{self, Ident}; +use crate::ast; use crate::parse::ParseSess; -use crate::parse::token::{self, Token}; +use crate::parse::token::{self, Token, TokenKind}; use crate::symbol::{sym, Symbol}; use crate::parse::unescape; use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char}; @@ -12,7 +12,6 @@ use core::unicode::property::Pattern_White_Space; use std::borrow::Cow; use std::char; use std::iter; -use std::mem::replace; use rustc_data_structures::sync::Lrc; use log::debug; @@ -21,21 +20,6 @@ mod tokentrees; mod unicode_chars; #[derive(Clone, Debug)] -pub struct TokenAndSpan { - pub tok: Token, - pub sp: Span, -} - -impl Default for TokenAndSpan { - fn default() -> Self { - TokenAndSpan { - tok: token::Whitespace, - sp: syntax_pos::DUMMY_SP, - } - } -} - -#[derive(Clone, Debug)] pub struct UnmatchedBrace { pub expected_delim: token::DelimToken, pub found_delim: token::DelimToken, @@ -56,8 +40,7 @@ pub struct StringReader<'a> { /// Stop reading src at this index. 
crate end_src_index: usize, // cached: - peek_tok: Token, - peek_span: Span, + peek_token: Token, peek_span_src_raw: Span, fatal_errs: Vec<DiagnosticBuilder<'a>>, // cache a direct reference to the source text, so that we don't have to @@ -78,16 +61,7 @@ impl<'a> StringReader<'a> { (real, raw) } - fn mk_ident(&self, string: &str) -> Ident { - let mut ident = Ident::from_str(string); - if let Some(span) = self.override_span { - ident.span = span; - } - - ident - } - - fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan { + fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token { match res { Ok(tok) => tok, Err(_) => { @@ -97,18 +71,15 @@ impl<'a> StringReader<'a> { } } - fn next_token(&mut self) -> TokenAndSpan where Self: Sized { + fn next_token(&mut self) -> Token where Self: Sized { let res = self.try_next_token(); self.unwrap_or_abort(res) } /// Returns the next token. EFFECT: advances the string_reader. - pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> { + pub fn try_next_token(&mut self) -> Result<Token, ()> { assert!(self.fatal_errs.is_empty()); - let ret_val = TokenAndSpan { - tok: replace(&mut self.peek_tok, token::Whitespace), - sp: self.peek_span, - }; + let ret_val = self.peek_token.take(); self.advance_token()?; Ok(ret_val) } @@ -135,10 +106,10 @@ impl<'a> StringReader<'a> { return None; } - fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> { + fn try_real_token(&mut self) -> Result<Token, ()> { let mut t = self.try_next_token()?; loop { - match t.tok { + match t.kind { token::Whitespace | token::Comment | token::Shebang(_) => { t = self.try_next_token()?; } @@ -149,7 +120,7 @@ impl<'a> StringReader<'a> { Ok(t) } - pub fn real_token(&mut self) -> TokenAndSpan { + pub fn real_token(&mut self) -> Token { let res = self.try_real_token(); self.unwrap_or_abort(res) } @@ -173,7 +144,7 @@ impl<'a> StringReader<'a> { } fn fatal(&self, m: &str) -> FatalError { - self.fatal_span(self.peek_span, m) + self.fatal_span(self.peek_token.span, m) } crate fn emit_fatal_errors(&mut self) { @@ -194,12 +165,8 @@ impl<'a> StringReader<'a> { buffer } - pub fn peek(&self) -> TokenAndSpan { - // FIXME(pcwalton): Bad copy! - TokenAndSpan { - tok: self.peek_tok.clone(), - sp: self.peek_span, - } + pub fn peek(&self) -> &Token { + &self.peek_token } /// For comments.rs, which hackily pokes into next_pos and ch @@ -229,9 +196,7 @@ impl<'a> StringReader<'a> { ch: Some('\n'), source_file, end_src_index: src.len(), - // dummy values; not read - peek_tok: token::Eof, - peek_span: syntax_pos::DUMMY_SP, + peek_token: Token::dummy(), peek_span_src_raw: syntax_pos::DUMMY_SP, src, fatal_errs: Vec::new(), @@ -336,31 +301,24 @@ impl<'a> StringReader<'a> { self.err_span_(from_pos, to_pos, &m[..]); } - /// Advance peek_tok and peek_span to refer to the next token, and + /// Advance peek_token to refer to the next token, and /// possibly update the interner. 
fn advance_token(&mut self) -> Result<(), ()> { match self.scan_whitespace_or_comment() { Some(comment) => { - self.peek_span_src_raw = comment.sp; - self.peek_span = comment.sp; - self.peek_tok = comment.tok; + self.peek_span_src_raw = comment.span; + self.peek_token = comment; } None => { - if self.is_eof() { - self.peek_tok = token::Eof; - let (real, raw) = self.mk_sp_and_raw( - self.source_file.end_pos, - self.source_file.end_pos, - ); - self.peek_span = real; - self.peek_span_src_raw = raw; + let (kind, start_pos, end_pos) = if self.is_eof() { + (token::Eof, self.source_file.end_pos, self.source_file.end_pos) } else { - let start_bytepos = self.pos; - self.peek_tok = self.next_token_inner()?; - let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos); - self.peek_span = real; - self.peek_span_src_raw = raw; + let start_pos = self.pos; + (self.next_token_inner()?, start_pos, self.pos) }; + let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos); + self.peek_token = Token::new(kind, real); + self.peek_span_src_raw = raw; } } @@ -527,7 +485,7 @@ impl<'a> StringReader<'a> { /// PRECONDITION: self.ch is not whitespace /// Eats any kind of comment. - fn scan_comment(&mut self) -> Option<TokenAndSpan> { + fn scan_comment(&mut self) -> Option<Token> { if let Some(c) = self.ch { if c.is_whitespace() { let msg = "called consume_any_line_comment, but there was whitespace"; @@ -563,14 +521,14 @@ impl<'a> StringReader<'a> { self.bump(); } - let tok = if doc_comment { + let kind = if doc_comment { self.with_str_from(start_bpos, |string| { token::DocComment(Symbol::intern(string)) }) } else { token::Comment }; - Some(TokenAndSpan { tok, sp: self.mk_sp(start_bpos, self.pos) }) + Some(Token::new(kind, self.mk_sp(start_bpos, self.pos))) } Some('*') => { self.bump(); @@ -594,10 +552,10 @@ impl<'a> StringReader<'a> { while !self.ch_is('\n') && !self.is_eof() { self.bump(); } - return Some(TokenAndSpan { - tok: token::Shebang(self.name_from(start)), - sp: self.mk_sp(start, self.pos), - }); + return Some(Token::new( + token::Shebang(self.name_from(start)), + self.mk_sp(start, self.pos), + )); } } None @@ -608,7 +566,7 @@ impl<'a> StringReader<'a> { /// If there is whitespace, shebang, or a comment, scan it. Otherwise, /// return `None`. - fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> { + fn scan_whitespace_or_comment(&mut self) -> Option<Token> { match self.ch.unwrap_or('\0') { // # to handle shebang at start of file -- this is the entry point // for skipping over all "junk" @@ -622,10 +580,7 @@ impl<'a> StringReader<'a> { while is_pattern_whitespace(self.ch) { self.bump(); } - let c = Some(TokenAndSpan { - tok: token::Whitespace, - sp: self.mk_sp(start_bpos, self.pos), - }); + let c = Some(Token::new(token::Whitespace, self.mk_sp(start_bpos, self.pos))); debug!("scanning whitespace: {:?}", c); c } @@ -634,7 +589,7 @@ impl<'a> StringReader<'a> { } /// Might return a sugared-doc-attr - fn scan_block_comment(&mut self) -> Option<TokenAndSpan> { + fn scan_block_comment(&mut self) -> Option<Token> { // block comments starting with "/**" or "/*!" 
are doc-comments let is_doc_comment = self.ch_is('*') || self.ch_is('!'); let start_bpos = self.pos - BytePos(2); @@ -671,7 +626,7 @@ impl<'a> StringReader<'a> { self.with_str_from(start_bpos, |string| { // but comments with only "*"s between two "/"s are not - let tok = if is_block_doc_comment(string) { + let kind = if is_block_doc_comment(string) { let string = if has_cr { self.translate_crlf(start_bpos, string, @@ -684,10 +639,7 @@ impl<'a> StringReader<'a> { token::Comment }; - Some(TokenAndSpan { - tok, - sp: self.mk_sp(start_bpos, self.pos), - }) + Some(Token::new(kind, self.mk_sp(start_bpos, self.pos))) }) } @@ -847,7 +799,7 @@ impl<'a> StringReader<'a> { } } - fn binop(&mut self, op: token::BinOpToken) -> Token { + fn binop(&mut self, op: token::BinOpToken) -> TokenKind { self.bump(); if self.ch_is('=') { self.bump(); @@ -859,7 +811,7 @@ impl<'a> StringReader<'a> { /// Returns the next token from the string, advances the input past that /// token, and updates the interner - fn next_token_inner(&mut self) -> Result<Token, ()> { + fn next_token_inner(&mut self) -> Result<TokenKind, ()> { let c = self.ch; if ident_start(c) { @@ -897,17 +849,17 @@ impl<'a> StringReader<'a> { return Ok(self.with_str_from(start, |string| { // FIXME: perform NFKC normalization here. (Issue #2253) - let ident = self.mk_ident(string); + let name = ast::Name::intern(string); if is_raw_ident { let span = self.mk_sp(raw_start, self.pos); - if !ident.can_be_raw() { - self.err_span(span, &format!("`{}` cannot be a raw identifier", ident)); + if !name.can_be_raw() { + self.err_span(span, &format!("`{}` cannot be a raw identifier", name)); } self.sess.raw_identifier_spans.borrow_mut().push(span); } - token::Ident(ident, is_raw_ident) + token::Ident(name, is_raw_ident) })); } } @@ -916,7 +868,7 @@ impl<'a> StringReader<'a> { let (kind, symbol) = self.scan_number(c.unwrap()); let suffix = self.scan_optional_raw_name(); debug!("next_token_inner: scanned number {:?}, {:?}, {:?}", kind, symbol, suffix); - return Ok(Token::lit(kind, symbol, suffix)); + return Ok(TokenKind::lit(kind, symbol, suffix)); } match c.expect("next_token_inner called at EOF") { @@ -1077,16 +1029,9 @@ impl<'a> StringReader<'a> { let symbol = self.name_from(start); self.bump(); self.validate_char_escape(start_with_quote); - return Ok(Token::lit(token::Char, symbol, None)); + return Ok(TokenKind::lit(token::Char, symbol, None)); } - // Include the leading `'` in the real identifier, for macro - // expansion purposes. See #12512 for the gory details of why - // this is necessary. - let ident = self.with_str_from(start_with_quote, |lifetime_name| { - self.mk_ident(lifetime_name) - }); - if starts_with_number { // this is a recovered lifetime written `'1`, error but accept it self.err_span_( @@ -1096,13 +1041,16 @@ impl<'a> StringReader<'a> { ); } - return Ok(token::Lifetime(ident)); + // Include the leading `'` in the real identifier, for macro + // expansion purposes. See #12512 for the gory details of why + // this is necessary. 
+ return Ok(token::Lifetime(self.name_from(start_with_quote))); } let msg = "unterminated character literal"; let symbol = self.scan_single_quoted_string(start_with_quote, msg); self.validate_char_escape(start_with_quote); let suffix = self.scan_optional_raw_name(); - Ok(Token::lit(token::Char, symbol, suffix)) + Ok(TokenKind::lit(token::Char, symbol, suffix)) } 'b' => { self.bump(); @@ -1127,7 +1075,7 @@ impl<'a> StringReader<'a> { }; let suffix = self.scan_optional_raw_name(); - Ok(Token::lit(kind, symbol, suffix)) + Ok(TokenKind::lit(kind, symbol, suffix)) } '"' => { let start_with_quote = self.pos; @@ -1135,7 +1083,7 @@ impl<'a> StringReader<'a> { let symbol = self.scan_double_quoted_string(msg); self.validate_str_escape(start_with_quote); let suffix = self.scan_optional_raw_name(); - Ok(Token::lit(token::Str, symbol, suffix)) + Ok(TokenKind::lit(token::Str, symbol, suffix)) } 'r' => { let start_bpos = self.pos; @@ -1213,7 +1161,7 @@ impl<'a> StringReader<'a> { }; let suffix = self.scan_optional_raw_name(); - Ok(Token::lit(token::StrRaw(hash_count), symbol, suffix)) + Ok(TokenKind::lit(token::StrRaw(hash_count), symbol, suffix)) } '-' => { if self.nextch_is('>') { @@ -1610,27 +1558,26 @@ mod tests { &sh, "/* my source file */ fn main() { println!(\"zebra\"); }\n" .to_string()); - let id = Ident::from_str("fn"); - assert_eq!(string_reader.next_token().tok, token::Comment); - assert_eq!(string_reader.next_token().tok, token::Whitespace); + assert_eq!(string_reader.next_token(), token::Comment); + assert_eq!(string_reader.next_token(), token::Whitespace); let tok1 = string_reader.next_token(); - let tok2 = TokenAndSpan { - tok: token::Ident(id, false), - sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), - }; - assert_eq!(tok1.tok, tok2.tok); - assert_eq!(tok1.sp, tok2.sp); - assert_eq!(string_reader.next_token().tok, token::Whitespace); + let tok2 = Token::new( + token::Ident(Symbol::intern("fn"), false), + Span::new(BytePos(21), BytePos(23), NO_EXPANSION), + ); + assert_eq!(tok1.kind, tok2.kind); + assert_eq!(tok1.span, tok2.span); + assert_eq!(string_reader.next_token(), token::Whitespace); // the 'main' id is already read: assert_eq!(string_reader.pos.clone(), BytePos(28)); // read another token: let tok3 = string_reader.next_token(); - let tok4 = TokenAndSpan { - tok: mk_ident("main"), - sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), - }; - assert_eq!(tok3.tok, tok4.tok); - assert_eq!(tok3.sp, tok4.sp); + let tok4 = Token::new( + mk_ident("main"), + Span::new(BytePos(24), BytePos(28), NO_EXPANSION), + ); + assert_eq!(tok3.kind, tok4.kind); + assert_eq!(tok3.span, tok4.span); // the lparen is already read: assert_eq!(string_reader.pos.clone(), BytePos(29)) }) @@ -1638,19 +1585,19 @@ mod tests { // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) - fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<Token>) { + fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) { for expected_tok in &expected { - assert_eq!(&string_reader.next_token().tok, expected_tok); + assert_eq!(&string_reader.next_token(), expected_tok); } } // make the identifier by looking up the string in the interner - fn mk_ident(id: &str) -> Token { - Token::from_ast_ident(Ident::from_str(id)) + fn mk_ident(id: &str) -> TokenKind { + TokenKind::from_ast_ident(Ident::from_str(id)) } - fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> Token { - Token::lit(kind, 
Symbol::intern(symbol), suffix.map(Symbol::intern)) + fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind { + TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern)) } #[test] @@ -1698,7 +1645,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(), mk_lit(token::Char, "a", None)); }) } @@ -1708,7 +1655,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(), mk_lit(token::Char, " ", None)); }) } @@ -1718,7 +1665,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(), mk_lit(token::Char, "\\n", None)); }) } @@ -1728,8 +1675,8 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok, - token::Lifetime(Ident::from_str("'abc"))); + assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(), + token::Lifetime(Symbol::intern("'abc"))); }) } @@ -1738,7 +1685,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(), mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None)); }) } @@ -1750,10 +1697,10 @@ mod tests { let sh = mk_sess(sm.clone()); macro_rules! 
test { ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ - assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok, + assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(), mk_lit(token::$tok_type, $tok_contents, Some("suffix"))); // with a whitespace separator: - assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok, + assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(), mk_lit(token::$tok_type, $tok_contents, None)); }} } @@ -1768,11 +1715,11 @@ mod tests { test!("1.0", Float, "1.0"); test!("1.0e10", Float, "1.0e10"); - assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(), mk_lit(token::Integer, "2", Some("us"))); - assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(), mk_lit(token::StrRaw(3), "raw", Some("suffix"))); - assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(), mk_lit(token::ByteStrRaw(3), "raw", Some("suffix"))); }) } @@ -1790,11 +1737,8 @@ mod tests { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string()); - match lexer.next_token().tok { - token::Comment => {} - _ => panic!("expected a comment!"), - } - assert_eq!(lexer.next_token().tok, mk_lit(token::Char, "a", None)); + assert_eq!(lexer.next_token(), token::Comment); + assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None)); }) } @@ -1805,11 +1749,10 @@ mod tests { let sh = mk_sess(sm.clone()); let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); - assert_eq!(comment.tok, token::Comment); - assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); - assert_eq!(lexer.next_token().tok, token::Whitespace); - assert_eq!(lexer.next_token().tok, - token::DocComment(Symbol::intern("/// test"))); + assert_eq!(comment.kind, token::Comment); + assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7))); + assert_eq!(lexer.next_token(), token::Whitespace); + assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test"))); }) } } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 4bfc5bb16c0..b809f99beba 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -2,15 +2,15 @@ use syntax_pos::Span; use crate::print::pprust::token_to_string; use crate::parse::lexer::{StringReader, UnmatchedBrace}; -use crate::parse::{token, PResult}; +use crate::parse::token::{self, Token}; +use crate::parse::PResult; use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint}; impl<'a> StringReader<'a> { crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) { let mut tt_reader = TokenTreesReader { string_reader: self, - token: token::Eof, - span: syntax_pos::DUMMY_SP, + token: Token::dummy(), open_braces: Vec::new(), unmatched_braces: Vec::new(), matching_delim_spans: Vec::new(), @@ -23,8 +23,7 @@ impl<'a> StringReader<'a> { struct TokenTreesReader<'a> { string_reader: StringReader<'a>, - token: token::Token, - span: Span, + token: Token, /// Stack of open delimiters and their spans. Used for error message. 
open_braces: Vec<(token::DelimToken, Span)>, unmatched_braces: Vec<UnmatchedBrace>, @@ -52,7 +51,7 @@ impl<'a> TokenTreesReader<'a> { fn parse_token_trees_until_close_delim(&mut self) -> TokenStream { let mut tts = vec![]; loop { - if let token::CloseDelim(..) = self.token { + if let token::CloseDelim(..) = self.token.kind { return TokenStream::new(tts); } @@ -68,11 +67,11 @@ impl<'a> TokenTreesReader<'a> { fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> { let sm = self.string_reader.sess.source_map(); - match self.token { + match self.token.kind { token::Eof => { let msg = "this file contains an un-closed delimiter"; let mut err = self.string_reader.sess.span_diagnostic - .struct_span_err(self.span, msg); + .struct_span_err(self.token.span, msg); for &(_, sp) in &self.open_braces { err.span_label(sp, "un-closed delimiter"); } @@ -102,10 +101,10 @@ impl<'a> TokenTreesReader<'a> { }, token::OpenDelim(delim) => { // The span for beginning of the delimited section - let pre_span = self.span; + let pre_span = self.token.span; // Parse the open delimiter. - self.open_braces.push((delim, self.span)); + self.open_braces.push((delim, self.token.span)); self.real_token(); // Parse the token trees within the delimiters. @@ -114,9 +113,9 @@ impl<'a> TokenTreesReader<'a> { let tts = self.parse_token_trees_until_close_delim(); // Expand to cover the entire delimited token tree - let delim_span = DelimSpan::from_pair(pre_span, self.span); + let delim_span = DelimSpan::from_pair(pre_span, self.token.span); - match self.token { + match self.token.kind { // Correct delimiter. token::CloseDelim(d) if d == delim => { let (open_brace, open_brace_span) = self.open_braces.pop().unwrap(); @@ -126,7 +125,7 @@ impl<'a> TokenTreesReader<'a> { self.matching_delim_spans.clear(); } else { self.matching_delim_spans.push( - (open_brace, open_brace_span, self.span), + (open_brace, open_brace_span, self.token.span), ); } // Parse the close delimiter. @@ -136,16 +135,16 @@ impl<'a> TokenTreesReader<'a> { token::CloseDelim(other) => { let mut unclosed_delimiter = None; let mut candidate = None; - if self.last_unclosed_found_span != Some(self.span) { + if self.last_unclosed_found_span != Some(self.token.span) { // do not complain about the same unclosed delimiter multiple times - self.last_unclosed_found_span = Some(self.span); + self.last_unclosed_found_span = Some(self.token.span); // This is a conservative error: only report the last unclosed // delimiter. The previous unclosed delimiters could actually be // closed! The parser just hasn't gotten to them yet. 
if let Some(&(_, sp)) = self.open_braces.last() { unclosed_delimiter = Some(sp); }; - if let Some(current_padding) = sm.span_to_margin(self.span) { + if let Some(current_padding) = sm.span_to_margin(self.token.span) { for (brace, brace_span) in &self.open_braces { if let Some(padding) = sm.span_to_margin(*brace_span) { // high likelihood of these two corresponding @@ -159,7 +158,7 @@ impl<'a> TokenTreesReader<'a> { self.unmatched_braces.push(UnmatchedBrace { expected_delim: tok, found_delim: other, - found_span: self.span, + found_span: self.token.span, unclosed_span: unclosed_delimiter, candidate_span: candidate, }); @@ -198,12 +197,12 @@ impl<'a> TokenTreesReader<'a> { let token_str = token_to_string(&self.token); let msg = format!("unexpected close delimiter: `{}`", token_str); let mut err = self.string_reader.sess.span_diagnostic - .struct_span_err(self.span, &msg); - err.span_label(self.span, "unexpected close delimiter"); + .struct_span_err(self.token.span, &msg); + err.span_label(self.token.span, "unexpected close delimiter"); Err(err) }, _ => { - let tt = TokenTree::Token(self.span, self.token.clone()); + let tt = TokenTree::Token(self.token.take()); // Note that testing for joint-ness here is done via the raw // source span as the joint-ness is a property of the raw source // rather than wanting to take `override_span` into account. @@ -219,8 +218,6 @@ impl<'a> TokenTreesReader<'a> { } fn real_token(&mut self) { - let t = self.string_reader.real_token(); - self.token = t.tok; - self.span = t.sp; + self.token = self.string_reader.real_token(); } } diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 18019a89130..7d5356ffe4d 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -1,9 +1,9 @@ //! Code related to parsing literals. -use crate::ast::{self, Ident, Lit, LitKind}; +use crate::ast::{self, Lit, LitKind}; use crate::parse::parser::Parser; use crate::parse::PResult; -use crate::parse::token::{self, Token}; +use crate::parse::token::{self, Token, TokenKind}; use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte}; use crate::print::pprust; use crate::symbol::{kw, sym, Symbol}; @@ -228,10 +228,10 @@ impl Lit { } /// Converts arbitrary token into an AST literal. - crate fn from_token(token: &Token, span: Span) -> Result<Lit, LitError> { - let lit = match *token { - token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False => - token::Lit::new(token::Bool, ident.name, None), + crate fn from_token(token: &Token) -> Result<Lit, LitError> { + let lit = match token.kind { + token::Ident(name, false) if name == kw::True || name == kw::False => + token::Lit::new(token::Bool, name, None), token::Literal(lit) => lit, token::Interpolated(ref nt) => { @@ -245,7 +245,7 @@ impl Lit { _ => return Err(LitError::NotLiteral) }; - Lit::from_lit_token(lit, span) + Lit::from_lit_token(lit, token.span) } /// Attempts to recover an AST literal from semantic literal. @@ -258,10 +258,10 @@ impl Lit { /// Losslessly convert an AST literal into a token stream. 
crate fn tokens(&self) -> TokenStream { let token = match self.token.kind { - token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false), + token::Bool => token::Ident(self.token.symbol, false), _ => token::Literal(self.token), }; - TokenTree::Token(self.span, token).into() + TokenTree::token(token, self.span).into() } } @@ -272,44 +272,43 @@ impl<'a> Parser<'a> { if self.token == token::Dot { // Attempt to recover `.4` as `0.4`. recovered = self.look_ahead(1, |t| { - if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = *t { + if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) + = t.kind { let next_span = self.look_ahead_span(1); if self.span.hi() == next_span.lo() { let s = String::from("0.") + &symbol.as_str(); - let token = Token::lit(token::Float, Symbol::intern(&s), suffix); - return Some((token, self.span.to(next_span))); + let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); + return Some(Token::new(kind, self.span.to(next_span))); } } None }); - if let Some((ref token, span)) = recovered { + if let Some(token) = &recovered { self.bump(); self.diagnostic() - .struct_span_err(span, "float literals must have an integer part") + .struct_span_err(token.span, "float literals must have an integer part") .span_suggestion( - span, + token.span, "must have an integer part", - pprust::token_to_string(&token), + pprust::token_to_string(token), Applicability::MachineApplicable, ) .emit(); } } - let (token, span) = recovered.as_ref().map_or((&self.token, self.span), - |(token, span)| (token, *span)); - - match Lit::from_token(token, span) { + let token = recovered.as_ref().unwrap_or(&self.token); + match Lit::from_token(token) { Ok(lit) => { self.bump(); Ok(lit) } Err(LitError::NotLiteral) => { let msg = format!("unexpected token: {}", self.this_token_descr()); - Err(self.span_fatal(span, &msg)) + Err(self.span_fatal(token.span, &msg)) } Err(err) => { - let lit = token.expect_lit(); + let (lit, span) = (token.expect_lit(), token.span); self.bump(); err.report(&self.sess.span_diagnostic, lit, span); let lit = token::Lit::new(token::Err, lit.symbol, lit.suffix); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index f7a7aba9ecb..063823bbf4d 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -5,7 +5,8 @@ use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId}; use crate::source_map::{SourceMap, FilePathMapping}; use crate::feature_gate::UnstableFeatures; use crate::parse::parser::Parser; -use crate::syntax::parse::parser::emit_unclosed_delims; +use crate::parse::parser::emit_unclosed_delims; +use crate::parse::token::TokenKind; use crate::tokenstream::{TokenStream, TokenTree}; use crate::diagnostics::plugin::ErrorMap; use crate::print::pprust::token_to_string; @@ -239,7 +240,7 @@ fn maybe_source_file_to_parser( let mut parser = stream_to_parser(sess, stream, None); parser.unclosed_delims = unclosed_delims; if parser.token == token::Eof && parser.span.is_dummy() { - parser.span = Span::new(end_pos, end_pos, parser.span.ctxt()); + parser.token.span = Span::new(end_pos, end_pos, parser.span.ctxt()); } Ok(parser) @@ -311,7 +312,7 @@ pub fn maybe_file_to_stream( for unmatched in unmatched_braces { let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( "incorrect close delimiter: `{}`", - token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), + token_to_string(&token::CloseDelim(unmatched.found_delim)), )); 
db.span_label(unmatched.found_span, "incorrect close delimiter"); if let Some(sp) = unmatched.candidate_span { @@ -358,13 +359,13 @@ pub fn stream_to_parser_with_base_dir<'a>( /// A sequence separator. pub struct SeqSep { /// The seperator token. - pub sep: Option<token::Token>, + pub sep: Option<TokenKind>, /// `true` if a trailing separator is allowed. pub trailing_sep_allowed: bool, } impl SeqSep { - pub fn trailing_allowed(t: token::Token) -> SeqSep { + pub fn trailing_allowed(t: TokenKind) -> SeqSep { SeqSep { sep: Some(t), trailing_sep_allowed: true, @@ -382,10 +383,12 @@ impl SeqSep { #[cfg(test)] mod tests { use super::*; - use crate::ast::{self, Ident, PatKind}; + use crate::ast::{self, Name, PatKind}; use crate::attr::first_attr_value_str_by_name; use crate::ptr::P; + use crate::parse::token::Token; use crate::print::pprust::item_to_string; + use crate::symbol::{kw, sym}; use crate::tokenstream::{DelimSpan, TokenTree}; use crate::util::parser_testing::string_to_stream; use crate::util::parser_testing::{string_to_expr, string_to_item}; @@ -417,8 +420,6 @@ mod tests { #[test] fn string_to_tts_macro () { with_default_globals(|| { - use crate::symbol::sym; - let tts: Vec<_> = string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect(); let tts: &[TokenTree] = &tts[..]; @@ -426,19 +427,20 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { ( 4, - Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))), - Some(&TokenTree::Token(_, token::Not)), - Some(&TokenTree::Token(_, token::Ident(name_zip, false))), + Some(&TokenTree::Token(Token { + kind: token::Ident(name_macro_rules, false), .. + })), + Some(&TokenTree::Token(Token { kind: token::Not, .. })), + Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })), Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)), ) - if name_macro_rules.name == sym::macro_rules - && name_zip.name.as_str() == "zip" => { + if name_macro_rules == sym::macro_rules && name_zip.as_str() == "zip" => { let tts = ¯o_tts.trees().collect::<Vec<_>>(); match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { ( 3, Some(&TokenTree::Delimited(_, first_delim, ref first_tts)), - Some(&TokenTree::Token(_, token::FatArrow)), + Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })), Some(&TokenTree::Delimited(_, second_delim, ref second_tts)), ) if macro_delim == token::Paren => { @@ -446,20 +448,24 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1)) { ( 2, - Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident, false))), + Some(&TokenTree::Token(Token { kind: token::Dollar, .. })), + Some(&TokenTree::Token(Token { + kind: token::Ident(name, false), .. + })), ) - if first_delim == token::Paren && ident.name.as_str() == "a" => {}, + if first_delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 3: {:?} {:?}", first_delim, first_tts), } let tts = &second_tts.trees().collect::<Vec<_>>(); match (tts.len(), tts.get(0), tts.get(1)) { ( 2, - Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident, false))), + Some(&TokenTree::Token(Token { kind: token::Dollar, .. })), + Some(&TokenTree::Token(Token { + kind: token::Ident(name, false), .. 
+ })), ) - if second_delim == token::Paren && ident.name.as_str() == "a" => {}, + if second_delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 4: {:?} {:?}", second_delim, second_tts), } }, @@ -477,26 +483,23 @@ mod tests { let tts = string_to_stream("fn a (b : i32) { b; }".to_string()); let expected = TokenStream::new(vec![ - TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(), - TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(), + TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(), + TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(), TokenTree::Delimited( DelimSpan::from_pair(sp(5, 6), sp(13, 14)), token::DelimToken::Paren, TokenStream::new(vec![ - TokenTree::Token(sp(6, 7), - token::Ident(Ident::from_str("b"), false)).into(), - TokenTree::Token(sp(8, 9), token::Colon).into(), - TokenTree::Token(sp(10, 13), - token::Ident(Ident::from_str("i32"), false)).into(), + TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(), + TokenTree::token(token::Colon, sp(8, 9)).into(), + TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(), ]).into(), ).into(), TokenTree::Delimited( DelimSpan::from_pair(sp(15, 16), sp(20, 21)), token::DelimToken::Brace, TokenStream::new(vec![ - TokenTree::Token(sp(17, 18), - token::Ident(Ident::from_str("b"), false)).into(), - TokenTree::Token(sp(18, 19), token::Semi).into(), + TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(), + TokenTree::token(token::Semi, sp(18, 19)).into(), ]).into(), ).into() ]); @@ -603,8 +606,6 @@ mod tests { #[test] fn crlf_doc_comments() { with_default_globals(|| { - use crate::symbol::sym; - let sess = ParseSess::new(FilePathMapping::empty()); let name_1 = FileName::Custom("crlf_source_1".to_string()); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 790013f6eb1..43e7c9330e4 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -36,9 +36,9 @@ use crate::{ast, attr}; use crate::ext::base::DummyResult; use crate::source_map::{self, SourceMap, Spanned, respan}; use crate::parse::{SeqSep, classify, literal, token}; -use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace}; +use crate::parse::lexer::UnmatchedBrace; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use crate::parse::token::DelimToken; +use crate::parse::token::{Token, TokenKind, DelimToken}; use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; use crate::util::parser::{AssocOp, Fixity}; use crate::print::pprust; @@ -57,6 +57,7 @@ use log::debug; use std::borrow::Cow; use std::cmp; use std::mem; +use std::ops::Deref; use std::path::{self, Path, PathBuf}; use std::slice; @@ -121,7 +122,7 @@ crate enum BlockMode { /// `token::Interpolated` tokens. macro_rules! maybe_whole_expr { ($p:expr) => { - if let token::Interpolated(nt) = &$p.token { + if let token::Interpolated(nt) = &$p.token.kind { match &**nt { token::NtExpr(e) | token::NtLiteral(e) => { let e = e.clone(); @@ -147,7 +148,7 @@ macro_rules! maybe_whole_expr { /// As maybe_whole_expr, but for things other than expressions macro_rules! maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { - if let token::Interpolated(nt) = &$p.token { + if let token::Interpolated(nt) = &$p.token.kind { if let token::$constructor(x) = &**nt { let $x = x.clone(); $p.bump(); @@ -161,7 +162,7 @@ macro_rules! maybe_whole { macro_rules! 
maybe_recover_from_interpolated_ty_qpath { ($self: expr, $allow_qpath_recovery: expr) => { if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) { - if let token::Interpolated(nt) = &$self.token { + if let token::Interpolated(nt) = &$self.token.kind { if let token::NtTy(ty) = &**nt { let ty = ty.clone(); $self.bump(); @@ -196,14 +197,17 @@ enum PrevTokenKind { #[derive(Clone)] pub struct Parser<'a> { pub sess: &'a ParseSess, - /// The current token. - pub token: token::Token, - /// The span of the current token. - pub span: Span, + /// The current normalized token. + /// "Normalized" means that some interpolated tokens + /// (`$i: ident` and `$l: lifetime` meta-variables) are replaced + /// with non-interpolated identifier and lifetime tokens they refer to. + /// Perhaps the normalized / non-normalized setup can be simplified somehow. + pub token: Token, + /// Span of the current non-normalized token. meta_var_span: Option<Span>, - /// The span of the previous token. + /// Span of the previous non-normalized token. pub prev_span: Span, - /// The kind of the previous troken. + /// Kind of the previous normalized token (in simplified form). prev_token_kind: PrevTokenKind, restrictions: Restrictions, /// Used to determine the path to externally loaded source files. @@ -242,6 +246,15 @@ impl<'a> Drop for Parser<'a> { } } +// FIXME: Parser uses `self.span` all the time. +// Remove this impl if you think that using `self.token.span` instead is acceptable. +impl Deref for Parser<'_> { + type Target = Token; + fn deref(&self) -> &Self::Target { + &self.token + } +} + #[derive(Clone)] crate struct TokenCursor { crate frame: TokenCursorFrame, @@ -295,7 +308,7 @@ impl TokenCursorFrame { } impl TokenCursor { - fn next(&mut self) -> TokenAndSpan { + fn next(&mut self) -> Token { loop { let tree = if !self.frame.open_delim { self.frame.open_delim = true; @@ -309,7 +322,7 @@ impl TokenCursor { self.frame = frame; continue } else { - return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP } + return Token::new(token::Eof, DUMMY_SP); }; match self.frame.last_token { @@ -318,7 +331,7 @@ impl TokenCursor { } match tree { - TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp }, + TokenTree::Token(token) => return token, TokenTree::Delimited(sp, delim, tts) => { let frame = TokenCursorFrame::new(sp, delim, &tts); self.stack.push(mem::replace(&mut self.frame, frame)); @@ -327,9 +340,9 @@ impl TokenCursor { } } - fn next_desugared(&mut self) -> TokenAndSpan { - let (sp, name) = match self.next() { - TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name), + fn next_desugared(&mut self) -> Token { + let (name, sp) = match self.next() { + Token { kind: token::DocComment(name), span } => (name, span), tok => return tok, }; @@ -353,11 +366,11 @@ impl TokenCursor { delim_span, token::Bracket, [ - TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)), - TokenTree::Token(sp, token::Eq), - TokenTree::Token(sp, token::Token::lit( + TokenTree::token(token::Ident(sym::doc, false), sp), + TokenTree::token(token::Eq, sp), + TokenTree::token(TokenKind::lit( token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None - )), + ), sp), ] .iter().cloned().collect::<TokenStream>().into(), ); @@ -366,10 +379,10 @@ impl TokenCursor { delim_span, token::NoDelim, &if doc_comment_style(&name.as_str()) == AttrStyle::Inner { - [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body] + [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, 
sp), body] .iter().cloned().collect::<TokenStream>().into() } else { - [TokenTree::Token(sp, token::Pound), body] + [TokenTree::token(token::Pound, sp), body] .iter().cloned().collect::<TokenStream>().into() }, ))); @@ -380,7 +393,7 @@ impl TokenCursor { #[derive(Clone, PartialEq)] crate enum TokenType { - Token(token::Token), + Token(TokenKind), Keyword(Symbol), Operator, Lifetime, @@ -410,7 +423,7 @@ impl TokenType { /// /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes /// that `IDENT` is not the ident of a fn trait. -fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool { +fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool { t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl) } @@ -468,8 +481,7 @@ impl<'a> Parser<'a> { ) -> Self { let mut parser = Parser { sess, - token: token::Whitespace, - span: DUMMY_SP, + token: Token::dummy(), prev_span: DUMMY_SP, meta_var_span: None, prev_token_kind: PrevTokenKind::Other, @@ -498,9 +510,7 @@ impl<'a> Parser<'a> { subparser_name, }; - let tok = parser.next_tok(); - parser.token = tok.tok; - parser.span = tok.sp; + parser.token = parser.next_tok(); if let Some(directory) = directory { parser.directory = directory; @@ -515,15 +525,15 @@ impl<'a> Parser<'a> { parser } - fn next_tok(&mut self) -> TokenAndSpan { + fn next_tok(&mut self) -> Token { let mut next = if self.desugar_doc_comments { self.token_cursor.next_desugared() } else { self.token_cursor.next() }; - if next.sp.is_dummy() { + if next.span.is_dummy() { // Tweak the location for better diagnostics, but keep syntactic context intact. - next.sp = self.prev_span.with_ctxt(next.sp.ctxt()); + next.span = self.prev_span.with_ctxt(next.span.ctxt()); } next } @@ -534,10 +544,10 @@ impl<'a> Parser<'a> { } crate fn token_descr(&self) -> Option<&'static str> { - Some(match &self.token { - t if t.is_special_ident() => "reserved identifier", - t if t.is_used_keyword() => "keyword", - t if t.is_unused_keyword() => "reserved keyword", + Some(match &self.token.kind { + _ if self.token.is_special_ident() => "reserved identifier", + _ if self.token.is_used_keyword() => "keyword", + _ if self.token.is_unused_keyword() => "reserved keyword", token::DocComment(..) => "doc comment", _ => return None, }) @@ -559,7 +569,7 @@ impl<'a> Parser<'a> { } /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. - pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { + pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); @@ -577,8 +587,8 @@ impl<'a> Parser<'a> { /// anything. Signal a fatal error if next token is unexpected. 
pub fn expect_one_of( &mut self, - edible: &[token::Token], - inedible: &[token::Token], + edible: &[TokenKind], + inedible: &[TokenKind], ) -> PResult<'a, bool /* recovered */> { if edible.contains(&self.token) { self.bump(); @@ -612,8 +622,8 @@ impl<'a> Parser<'a> { } fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { - match self.token { - token::Ident(ident, _) => { + match self.token.kind { + token::Ident(name, _) => { if self.token.is_reserved_ident() { let mut err = self.expected_ident_found(); if recover { @@ -624,7 +634,7 @@ impl<'a> Parser<'a> { } let span = self.span; self.bump(); - Ok(Ident::new(ident.name, span)) + Ok(Ident::new(name, span)) } _ => { Err(if self.prev_token_kind == PrevTokenKind::DocComment { @@ -640,14 +650,14 @@ impl<'a> Parser<'a> { /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// encountered. - crate fn check(&mut self, tok: &token::Token) -> bool { + crate fn check(&mut self, tok: &TokenKind) -> bool { let is_present = self.token == *tok; if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } is_present } /// Consumes a token 'tok' if it exists. Returns whether the given token was present. - pub fn eat(&mut self, tok: &token::Token) -> bool { + pub fn eat(&mut self, tok: &TokenKind) -> bool { let is_present = self.check(tok); if is_present { self.bump() } is_present @@ -732,7 +742,7 @@ impl<'a> Parser<'a> { /// See issue #47856 for an example of when this may occur. fn eat_plus(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); - match self.token { + match self.token.kind { token::BinOp(token::Plus) => { self.bump(); true @@ -763,7 +773,7 @@ impl<'a> Parser<'a> { /// `&` and continues. If an `&` is not seen, signals an error. fn expect_and(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::And))); - match self.token { + match self.token.kind { token::BinOp(token::And) => { self.bump(); Ok(()) @@ -780,7 +790,7 @@ impl<'a> Parser<'a> { /// `|` and continues. If an `|` is not seen, signals an error. fn expect_or(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or))); - match self.token { + match self.token.kind { token::BinOp(token::Or) => { self.bump(); Ok(()) @@ -805,7 +815,7 @@ impl<'a> Parser<'a> { /// starting token. fn eat_lt(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::Lt)); - let ate = match self.token { + let ate = match self.token.kind { token::Lt => { self.bump(); true @@ -845,7 +855,7 @@ impl<'a> Parser<'a> { /// with a single `>` and continues. If a `>` is not seen, signals an error. fn expect_gt(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::Gt)); - let ate = match self.token { + let ate = match self.token.kind { token::Gt => { self.bump(); Some(()) @@ -883,7 +893,7 @@ impl<'a> Parser<'a> { /// `f` must consume tokens until reaching the next separator or /// closing bracket. pub fn parse_seq_to_end<T, F>(&mut self, - ket: &token::Token, + ket: &TokenKind, sep: SeqSep, f: F) -> PResult<'a, Vec<T>> where @@ -901,7 +911,7 @@ impl<'a> Parser<'a> { /// closing bracket. 
pub fn parse_seq_to_before_end<T, F>( &mut self, - ket: &token::Token, + ket: &TokenKind, sep: SeqSep, f: F, ) -> PResult<'a, (Vec<T>, bool)> @@ -912,7 +922,7 @@ impl<'a> Parser<'a> { crate fn parse_seq_to_before_tokens<T, F>( &mut self, - kets: &[&token::Token], + kets: &[&TokenKind], sep: SeqSep, expect: TokenExpectType, mut f: F, @@ -928,7 +938,7 @@ impl<'a> Parser<'a> { TokenExpectType::NoExpect => self.token == **k, } }) { - match self.token { + match self.token.kind { token::CloseDelim(..) | token::Eof => break, _ => {} }; @@ -986,8 +996,8 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_unspanned_seq<T, F>( &mut self, - bra: &token::Token, - ket: &token::Token, + bra: &TokenKind, + ket: &TokenKind, sep: SeqSep, f: F, ) -> PResult<'a, Vec<T>> where @@ -1011,7 +1021,7 @@ impl<'a> Parser<'a> { self.prev_span = self.meta_var_span.take().unwrap_or(self.span); // Record last token kind for possible error recovery. - self.prev_token_kind = match self.token { + self.prev_token_kind = match self.token.kind { token::DocComment(..) => PrevTokenKind::DocComment, token::Comma => PrevTokenKind::Comma, token::BinOp(token::Plus) => PrevTokenKind::Plus, @@ -1022,9 +1032,7 @@ impl<'a> Parser<'a> { _ => PrevTokenKind::Other, }; - let next = self.next_tok(); - self.span = next.sp; - self.token = next.tok; + self.token = self.next_tok(); self.expected_tokens.clear(); // check after each token self.process_potential_macro_variable(); @@ -1032,30 +1040,31 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. For example a single `<` from `<<`. - fn bump_with(&mut self, next: token::Token, span: Span) { + fn bump_with(&mut self, next: TokenKind, span: Span) { self.prev_span = self.span.with_hi(span.lo()); // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. 
        self.prev_token_kind = PrevTokenKind::Other;
-        self.span = span;
-        self.token = next;
+        self.token = Token::new(next, span);
        self.expected_tokens.clear();
    }

    pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
-        F: FnOnce(&token::Token) -> R,
+        F: FnOnce(&Token) -> R,
    {
        if dist == 0 {
-            return f(&self.token)
+            return f(&self.token);
        }

-        f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+        let frame = &self.token_cursor.frame;
+        f(&match frame.tree_cursor.look_ahead(dist - 1) {
            Some(tree) => match tree {
-                TokenTree::Token(_, tok) => tok,
-                TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
-            },
-            None => token::CloseDelim(self.token_cursor.frame.delim),
+                TokenTree::Token(token) => token,
+                TokenTree::Delimited(dspan, delim, _) =>
+                    Token::new(token::OpenDelim(delim), dspan.open),
+            }
+            None => Token::new(token::CloseDelim(frame.delim), frame.span.close)
        })
    }

@@ -1065,7 +1074,7 @@ impl<'a> Parser<'a> {
        }
        match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
-            Some(TokenTree::Token(span, _)) => span,
+            Some(TokenTree::Token(token)) => token.span,
            Some(TokenTree::Delimited(span, ..)) => span.entire(),
            None => self.look_ahead_span(dist - 1),
        }
    }
@@ -1209,7 +1218,7 @@ impl<'a> Parser<'a> {
            decl,
        };

-        let body = match self.token {
+        let body = match self.token.kind {
            token::Semi => {
                self.bump();
                *at_end = true;
@@ -1477,7 +1486,7 @@ impl<'a> Parser<'a> {
    }

    fn is_named_argument(&self) -> bool {
-        let offset = match self.token {
+        let offset = match self.token.kind {
            token::Interpolated(ref nt) => match **nt {
                token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                _ => 0,
@@ -1612,22 +1621,22 @@ impl<'a> Parser<'a> {
    }

    fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
-        match self.token {
-            token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
+        match self.token.kind {
+            token::Ident(name, _) if name.is_path_segment_keyword() => {
                let span = self.span;
                self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
            }
            _ => self.parse_ident(),
        }
    }

    fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
-        match self.token {
-            token::Ident(ident, false) if ident.name == kw::Underscore => {
+        match self.token.kind {
+            token::Ident(name, false) if name == kw::Underscore => {
                let span = self.span;
                self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
            }
            _ => self.parse_ident(),
        }
    }
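The `look_ahead` hunk above changes the callback from `FnOnce(&token::Token) -> R` (a bare kind) to `FnOnce(&Token) -> R`, so lookahead closures can now see spans too, and tokens past the current frame are synthesized on the fly. A rough sketch of the idea under the same placeholder types; the real implementation walks a token-tree cursor, not a flat buffer:

```rust
// Simplified placeholder types, not the real libsyntax API.
#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Lt,
    Ident(&'static str),
    Eof,
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: (u32, u32),
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
}

impl Parser {
    fn look_ahead<R>(&self, dist: usize, f: impl FnOnce(&Token) -> R) -> R {
        // Past the end of the buffer we synthesize an `Eof` token with a
        // dummy span, mirroring how the real parser synthesizes
        // `CloseDelim` tokens from the current frame's closing span.
        let eof = Token { kind: TokenKind::Eof, span: (0, 0) };
        f(self.tokens.get(self.pos + dist).unwrap_or(&eof))
    }
}

fn main() {
    let parser = Parser {
        tokens: vec![
            Token { kind: TokenKind::Ident("x"), span: (0, 1) },
            Token { kind: TokenKind::Lt, span: (1, 2) },
        ],
        pos: 0,
    };
    // Both the kind and the span of the next token are visible to the closure.
    let is_lt = parser.look_ahead(1, |t| t.kind == TokenKind::Lt);
    let span = parser.look_ahead(1, |t| t.span);
    println!("next is `<`: {} at {:?}", is_lt, span);
}
```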
@@ -1710,7 +1719,7 @@ impl<'a> Parser<'a> {
    /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
    /// attributes.
    pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
-        let meta_ident = match self.token {
+        let meta_ident = match self.token.kind {
            token::Interpolated(ref nt) => match **nt {
                token::NtMeta(ref meta) => match meta.node {
                    ast::MetaItemKind::Word => Some(meta.path.clone()),
@@ -1763,7 +1772,7 @@ impl<'a> Parser<'a> {
    fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
        let ident = self.parse_path_segment_ident()?;

-        let is_args_start = |token: &token::Token| match *token {
+        let is_args_start = |token: &TokenKind| match *token {
            token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
            | token::LArrow => true,
            _ => false,
@@ -1859,7 +1868,8 @@ impl<'a> Parser<'a> {
    }

    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
-        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token {
+        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
+                self.token.kind {
            self.expect_no_suffix(self.span, "a tuple index", suffix);
            self.bump();
            Ok(Ident::new(symbol, self.prev_span))
@@ -1949,7 +1959,7 @@ impl<'a> Parser<'a> {
    }

    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
-        let delim = match self.token {
+        let delim = match self.token.kind {
            token::OpenDelim(delim) => delim,
            _ => {
                let msg = "expected open delimiter";
@@ -1992,8 +2002,8 @@ impl<'a> Parser<'a> {

        let ex: ExprKind;

-        // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
-        match self.token {
+        // Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr().
+        match self.token.kind {
            token::OpenDelim(token::Paren) => {
                self.bump();
@@ -2363,13 +2373,11 @@ impl<'a> Parser<'a> {
        }

        let mut recovery_field = None;
-        if let token::Ident(ident, _) = self.token {
+        if let token::Ident(name, _) = self.token.kind {
            if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
                // Use in case of error after field-looking code: `S { foo: () with a }`
-                let mut ident = ident.clone();
-                ident.span = self.span;
                recovery_field = Some(ast::Field {
-                    ident,
+                    ident: Ident::new(name, self.span),
                    span: self.span,
                    expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
                    is_shorthand: false,
@@ -2503,7 +2511,7 @@ impl<'a> Parser<'a> {
        let segment = self.parse_path_segment(PathStyle::Expr)?;
        self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren));

-        Ok(match self.token {
+        Ok(match self.token.kind {
            token::OpenDelim(token::Paren) => {
                // Method call `expr.f()`
                let mut args = self.parse_unspanned_seq(
@@ -2542,7 +2550,7 @@ impl<'a> Parser<'a> {

            // expr.f
            if self.eat(&token::Dot) {
-                match self.token {
+                match self.token.kind {
                    token::Ident(..) => {
                        e = self.parse_dot_suffix(e, lo)?;
                    }
@@ -2594,7 +2602,7 @@ impl<'a> Parser<'a> {
                continue;
            }
            if self.expr_is_complete(&e) { break; }
-            match self.token {
+            match self.token.kind {
                // expr(...)
                token::OpenDelim(token::Paren) => {
                    let seq = self.parse_unspanned_seq(
@@ -2627,12 +2635,12 @@ impl<'a> Parser<'a> {
    }

    crate fn process_potential_macro_variable(&mut self) {
-        let (token, span) = match self.token {
+        self.token = match self.token.kind {
            token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
                             self.look_ahead(1, |t| t.is_ident()) => {
                self.bump();
-                let name = match self.token {
-                    token::Ident(ident, _) => ident,
+                let name = match self.token.kind {
+                    token::Ident(name, _) => name,
                    _ => unreachable!()
                };
                let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
@@ -2646,24 +2654,24 @@ impl<'a> Parser<'a> {
                // Interpolated identifier and lifetime tokens are replaced with usual identifier
                // and lifetime tokens, so the former are never encountered during normal parsing.
                match **nt {
-                    token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
-                    token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
+                    token::NtIdent(ident, is_raw) =>
+                        Token::new(token::Ident(ident.name, is_raw), ident.span),
+                    token::NtLifetime(ident) =>
+                        Token::new(token::Lifetime(ident.name), ident.span),
                    _ => return,
                }
            }
            _ => return,
        };
-        self.token = token;
-        self.span = span;
    }

    /// Parses a single token tree from the input.
    crate fn parse_token_tree(&mut self) -> TokenTree {
-        match self.token {
+        match self.token.kind {
            token::OpenDelim(..) => {
                let frame = mem::replace(&mut self.token_cursor.frame,
                                         self.token_cursor.stack.pop().unwrap());
-                self.span = frame.span.entire();
+                self.token.span = frame.span.entire();
                self.bump();
                TokenTree::Delimited(
                    frame.span,
@@ -2673,9 +2681,9 @@ impl<'a> Parser<'a> {
            },
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
-                let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
+                let token = self.token.take();
                self.bump();
-                TokenTree::Token(span, token)
+                TokenTree::Token(token)
            }
        }
    }
@@ -2692,7 +2700,7 @@ impl<'a> Parser<'a> {
    pub fn parse_tokens(&mut self) -> TokenStream {
        let mut result = Vec::new();
        loop {
-            match self.token {
+            match self.token.kind {
                token::Eof | token::CloseDelim(..) => break,
                _ => result.push(self.parse_token_tree().into()),
            }
@@ -2706,8 +2714,8 @@ impl<'a> Parser<'a> {
                            -> PResult<'a, P<Expr>> {
        let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
        let lo = self.span;
-        // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr()
-        let (hi, ex) = match self.token {
+        // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
+        let (hi, ex) = match self.token.kind {
            token::Not => {
                self.bump();
                let e = self.parse_prefix_expr(None);
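`parse_token_tree` above now moves the current token out with `self.token.take()` instead of open-coding `mem::replace(&mut self.token, token::Whitespace)` at the call site. The `take` helper (defined in the token.rs hunks further down) is the standard replace-with-a-dummy pattern; a compilable sketch with placeholder types:

```rust
use std::mem;

#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Whitespace,
    Ident(String),
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: (u32, u32),
}

impl Token {
    // Some token that will be thrown away later.
    fn dummy() -> Self {
        Token { kind: TokenKind::Whitespace, span: (0, 0) }
    }

    /// Returns this token by value and leaves a dummy token in its place,
    /// avoiding a clone of the (potentially heavy) token payload.
    fn take(&mut self) -> Self {
        mem::replace(self, Token::dummy())
    }
}

fn main() {
    let mut current = Token { kind: TokenKind::Ident("foo".into()), span: (3, 6) };
    let moved = current.take();
    assert_eq!(moved.kind, TokenKind::Ident("foo".into()));
    assert_eq!(current, Token::dummy());
    println!("took {:?}, left {:?}", moved, current);
}
```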
@@ -2760,10 +2768,10 @@ impl<'a> Parser<'a> {
                // `not` is just an ordinary identifier in Rust-the-language,
                // but as `rustc`-the-compiler, we can issue clever diagnostics
                // for confused users who really want to say `!`
-                let token_cannot_continue_expr = |t: &token::Token| match *t {
+                let token_cannot_continue_expr = |t: &Token| match t.kind {
                    // These tokens can start an expression after `!`, but
                    // can't continue an expression after an ident
-                    token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
+                    token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
                    token::Literal(..) | token::Pound => true,
                    token::Interpolated(ref nt) => match **nt {
                        token::NtIdent(..) | token::NtExpr(..) |
@@ -3040,7 +3048,7 @@ impl<'a> Parser<'a> {

        match self.parse_path(PathStyle::Expr) {
            Ok(path) => {
-                let (op_noun, op_verb) = match self.token {
+                let (op_noun, op_verb) = match self.token.kind {
                    token::Lt => ("comparison", "comparing"),
                    token::BinOp(token::Shl) => ("shift", "shifting"),
                    _ => {
@@ -3359,7 +3367,7 @@ impl<'a> Parser<'a> {
        let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
        if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
-            if self.token == token::Token::Semi {
+            if self.token == token::Semi {
                e.span_suggestion_short(
                    match_span,
                    "try removing this `match`",
@@ -3844,14 +3852,14 @@ impl<'a> Parser<'a> {
    // helper function to decide whether to parse as ident binding or to try to do
    // something more complex like range patterns
    fn parse_as_ident(&mut self) -> bool {
-        self.look_ahead(1, |t| match *t {
+        self.look_ahead(1, |t| match t.kind {
            token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
            token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false),
            // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the
            // range pattern branch
            token::DotDot => None,
            _ => Some(true),
-        }).unwrap_or_else(|| self.look_ahead(2, |t| match *t {
+        }).unwrap_or_else(|| self.look_ahead(2, |t| match t.kind {
            token::Comma | token::CloseDelim(token::Bracket) => true,
            _ => false,
        }))
@@ -3914,14 +3922,13 @@ impl<'a> Parser<'a> {
        let lo = self.span;
        let pat;
-        match self.token {
+        match self.token.kind {
            token::BinOp(token::And) | token::AndAnd => {
                // Parse &pat / &mut pat
                self.expect_and()?;
                let mutbl = self.parse_mutability();
-                if let token::Lifetime(ident) = self.token {
-                    let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern",
-                                                      ident));
+                if let token::Lifetime(name) = self.token.kind {
+                    let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name));
                    err.span_label(self.span, "unexpected lifetime");
                    return Err(err);
                }
@@ -3990,7 +3997,7 @@ impl<'a> Parser<'a> {
                    // Parse an unqualified path
                    (None, self.parse_path(PathStyle::Expr)?)
                };
-        match self.token {
+        match self.token.kind {
            token::Not if qself.is_none() => {
                // Parse macro invocation
                self.bump();
@@ -3999,7 +4006,7 @@ impl<'a> Parser<'a> {
                pat = PatKind::Mac(mac);
            }
            token::DotDotDot | token::DotDotEq | token::DotDot => {
-                let end_kind = match self.token {
+                let end_kind = match self.token.kind {
                    token::DotDot => RangeEnd::Excluded,
                    token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot),
                    token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq),
@@ -4325,8 +4332,8 @@ impl<'a> Parser<'a> {
    fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
                     -> PResult<'a, Option<P<Item>>> {
        let token_lo = self.span;
-        let (ident, def) = match self.token {
-            token::Ident(ident, false) if ident.name == kw::Macro => {
+        let (ident, def) = match self.token.kind {
+            token::Ident(name, false) if name == kw::Macro => {
                self.bump();
                let ident = self.parse_ident()?;
                let tokens = if self.check(&token::OpenDelim(token::Brace)) {
@@ -4344,7 +4351,7 @@ impl<'a> Parser<'a> {
                    };
                    TokenStream::new(vec![
                        args.into(),
-                        TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+                        TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
                        body.into(),
                    ])
                } else {
@@ -4354,8 +4361,8 @@ impl<'a> Parser<'a> {

                (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
            }
-            token::Ident(ident, _) if ident.name == sym::macro_rules &&
-                                      self.look_ahead(1, |t| *t == token::Not) => {
+            token::Ident(name, _) if name == sym::macro_rules &&
+                                     self.look_ahead(1, |t| *t == token::Not) => {
                let prev_span = self.prev_span;
                self.complain_if_pub_macro(&vis.node, prev_span);
                self.bump();
@@ -4436,7 +4443,7 @@ impl<'a> Parser<'a> {
        }

        // it's a macro invocation
-        let id = match self.token {
+        let id = match self.token.kind {
            token::OpenDelim(_) => Ident::invalid(), // no special identifier
            _ => self.parse_ident()?,
        };
@@ -4444,7 +4451,7 @@ impl<'a> Parser<'a> {
        // check that we're pointing at delimiters (need to check
        // again after the `if`, because of `parse_ident`
        // consuming more tokens).
-        match self.token {
+        match self.token.kind {
            token::OpenDelim(_) => {}
            _ => {
                // we only expect an ident if we didn't parse one
@@ -4481,7 +4488,9 @@ impl<'a> Parser<'a> {
        // We used to incorrectly stop parsing macro-expanded statements here.
        // If the next token will be an error anyway but could have parsed with the
        // earlier behavior, stop parsing here and emit a warning to avoid breakage.
-        else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
+        else if macro_legacy_warnings &&
+                self.token.can_begin_expr() &&
+                match self.token.kind {
            // These can continue an expression, so we can't stop parsing and warn.
            token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
            token::BinOp(token::Minus) | token::BinOp(token::Star) |
@@ -4779,7 +4788,7 @@ impl<'a> Parser<'a> {
        let mut last_plus_span = None;
        let mut was_negative = false;
        loop {
-            // This needs to be synchronized with `Token::can_begin_bound`.
+            // This needs to be synchronized with `TokenKind::can_begin_bound`.
            let is_bound_start = self.check_path() || self.check_lifetime() ||
                                 self.check(&token::Not) || // used for error reporting only
                                 self.check(&token::Question) ||
@@ -5250,7 +5259,7 @@ impl<'a> Parser<'a> {
                assoc_ty_constraints.push(span);
            } else if self.check_const_arg() {
                // Parse const argument.
-                let expr = if let token::OpenDelim(token::Brace) = self.token {
+                let expr = if let token::OpenDelim(token::Brace) = self.token.kind {
                    self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())?
                } else if self.token.is_ident() {
                    // FIXME(const_generics): to distinguish between idents for types and consts,
@@ -5477,10 +5486,10 @@ impl<'a> Parser<'a> {
    /// Returns the parsed optional self argument and whether a self shortcut was used.
    fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
-        let expect_ident = |this: &mut Self| match this.token {
+        let expect_ident = |this: &mut Self| match this.token.kind {
            // Preserve hygienic context.
-            token::Ident(ident, _) =>
-                { let span = this.span; this.bump(); Ident::new(ident.name, span) }
+            token::Ident(name, _) =>
+                { let span = this.span; this.bump(); Ident::new(name, span) }
            _ => unreachable!()
        };
        let isolated_self = |this: &mut Self, n| {
@@ -5492,7 +5501,7 @@ impl<'a> Parser<'a> {
        // Only a limited set of initial token sequences is considered `self` parameters; anything
        // else is parsed as a normal function parameter list, so some lookahead is required.
        let eself_lo = self.span;
-        let (eself, eself_ident, eself_hi) = match self.token {
+        let (eself, eself_ident, eself_hi) = match self.token.kind {
            token::BinOp(token::And) => {
                // `&self`
                // `&mut self`
@@ -5803,11 +5812,7 @@ impl<'a> Parser<'a> {
        match *vis {
            VisibilityKind::Inherited => {}
            _ => {
-                let is_macro_rules: bool = match self.token {
-                    token::Ident(sid, _) => sid.name == sym::macro_rules,
-                    _ => false,
-                };
-                let mut err = if is_macro_rules {
+                let mut err = if self.token.is_keyword(sym::macro_rules) {
                    let mut err = self.diagnostic()
                        .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
                    err.span_suggestion(
@@ -5918,9 +5923,9 @@ impl<'a> Parser<'a> {
        self.expect(&token::OpenDelim(token::Brace))?;
        let mut trait_items = vec![];
        while !self.eat(&token::CloseDelim(token::Brace)) {
-            if let token::DocComment(_) = self.token {
+            if let token::DocComment(_) = self.token.kind {
                if self.look_ahead(1,
-                    |tok| tok == &token::Token::CloseDelim(token::Brace)) {
+                    |tok| tok == &token::CloseDelim(token::Brace)) {
                    let mut err = self.diagnostic().struct_span_err_with_code(
                        self.span,
                        "found a documentation comment that doesn't document anything",
@@ -6246,7 +6251,7 @@ impl<'a> Parser<'a> {
            if self.token == token::Comma {
                seen_comma = true;
            }
-            match self.token {
+            match self.token.kind {
                token::Comma => {
                    self.bump();
                }
@@ -6413,7 +6418,7 @@ impl<'a> Parser<'a> {
    }

    /// Given a termination token, parses all of the items in a module.
-    fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
+    fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> {
        let mut items = vec![];
        while let Some(item) = self.parse_item()? {
            items.push(item);
@@ -6796,7 +6801,7 @@ impl<'a> Parser<'a> {
        let mut replacement = vec![];
        let mut fixed_crate_name = false;
        // Accept `extern crate name-like-this` for better diagnostics
-        let dash = token::Token::BinOp(token::BinOpToken::Minus);
+        let dash = token::BinOp(token::BinOpToken::Minus);
        if self.token == dash { // Do not include `-` as part of the expected tokens list
            while self.eat(&dash) {
                fixed_crate_name = true;
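The `is_macro_rules` hunk above collapses an open-coded match into `self.token.is_keyword(sym::macro_rules)`, which in the new token.rs is routed through a single `ident_name` accessor. A sketch of that accessor-based keyword check, with plain strings standing in for rustc's interned `Symbol`s:

```rust
// Placeholder types; the real check compares interned Symbols, not &str.
#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Ident(String, /* is_raw */ bool),
    Semi,
}

impl TokenKind {
    // Single accessor that all identifier queries funnel through.
    fn ident_name(&self) -> Option<(&str, bool)> {
        match self {
            TokenKind::Ident(name, is_raw) => Some((name.as_str(), *is_raw)),
            _ => None,
        }
    }

    /// Returns `true` if the token is the given (non-raw) keyword.
    fn is_keyword(&self, kw: &str) -> bool {
        self.ident_name().map_or(false, |(name, is_raw)| name == kw && !is_raw)
    }
}

fn main() {
    let tok = TokenKind::Ident("macro_rules".into(), false);
    // Replaces the open-coded `match self.token { token::Ident(sid, _) => ... }`.
    assert!(tok.is_keyword("macro_rules"));
    assert!(!TokenKind::Semi.is_keyword("macro_rules"));
    println!("keyword check ok");
}
```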
@@ -7011,7 +7016,7 @@ impl<'a> Parser<'a> {
    /// Parses a string as an ABI spec on an extern type or module. Consumes
    /// the `extern` keyword, if one is found.
    fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
-        match self.token {
+        match self.token.kind {
            token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
            token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
                let sp = self.span;
@@ -7046,7 +7051,7 @@ impl<'a> Parser<'a> {
        if token.is_keyword(kw::Move) {
            return true;
        }
-        match *token {
+        match token.kind {
            token::BinOp(token::Or) | token::OrOr => true,
            _ => false,
        }
@@ -7818,7 +7823,7 @@ impl<'a> Parser<'a> {
    }

    pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
-        let ret = match self.token {
+        let ret = match self.token.kind {
            token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
                (symbol, ast::StrStyle::Cooked, suffix),
            token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
@@ -7869,7 +7874,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
    for unmatched in unclosed_delims.iter() {
        let mut err = handler.struct_span_err(unmatched.found_span, &format!(
            "incorrect close delimiter: `{}`",
-            pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+            pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
        ));
        err.span_label(unmatched.found_span, "incorrect close delimiter");
        if let Some(sp) = unmatched.candidate_span {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 47185df8d61..28a733728bf 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -2,22 +2,22 @@ pub use BinOpToken::*;
pub use Nonterminal::*;
pub use DelimToken::*;
pub use LitKind::*;
-pub use Token::*;
+pub use TokenKind::*;

use crate::ast::{self};
-use crate::parse::ParseSess;
+use crate::parse::{parse_stream_from_source_str, ParseSess};
use crate::print::pprust;
use crate::ptr::P;
use crate::symbol::kw;
-use crate::syntax::parse::parse_stream_from_source_str;
use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};

-use syntax_pos::symbol::{self, Symbol};
-use syntax_pos::{self, Span, FileName};
+use syntax_pos::symbol::Symbol;
+use syntax_pos::{self, Span, FileName, DUMMY_SP};

use log::info;
use std::fmt;
use std::mem;
+use std::ops::Deref;
#[cfg(target_arch = "x86_64")]
use rustc_data_structures::static_assert_size;
use rustc_data_structures::sync::Lrc;
@@ -117,8 +117,8 @@ impl Lit {
    }
}

-pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: Token = Ident(ident, is_raw);
+pub(crate) fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);

    !ident_token.is_reserved_ident() ||
    ident_token.is_path_segment_keyword() ||
@@ -145,11 +145,11 @@ pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
        kw::While,
        kw::Yield,
        kw::Static,
-    ].contains(&ident.name)
+    ].contains(&name)
}

-fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: Token = Ident(ident, is_raw);
+fn ident_can_begin_type(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);

    !ident_token.is_reserved_ident() ||
    ident_token.is_path_segment_keyword() ||
@@ -162,11 +162,11 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
        kw::Extern,
        kw::Typeof,
        kw::Dyn,
-    ].contains(&ident.name)
+    ].contains(&name)
}

-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
-pub enum Token {
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub enum TokenKind {
    /* Expression-operator symbols. */
    Eq,
    Lt,
@@ -209,8 +209,8 @@ pub enum Token {
    Literal(Lit),

    /* Name components */
-    Ident(ast::Ident, /* is_raw */ bool),
-    Lifetime(ast::Ident),
+    Ident(ast::Name, /* is_raw */ bool),
+    Lifetime(ast::Name),

    Interpolated(Lrc<Nonterminal>),

@@ -231,14 +231,20 @@ pub enum Token {
    Eof,
}

-// `Token` is used a lot. Make sure it doesn't unintentionally get bigger.
+// `TokenKind` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
-static_assert_size!(Token, 16);
+static_assert_size!(TokenKind, 16);

-impl Token {
-    /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
-    pub fn from_ast_ident(ident: ast::Ident) -> Token {
-        Ident(ident, ident.is_raw_guess())
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub struct Token {
+    pub kind: TokenKind,
+    pub span: Span,
+}
+
+impl TokenKind {
+    /// Recovers a `TokenKind` from an `ast::Ident`. This creates a raw identifier if necessary.
+    pub fn from_ast_ident(ident: ast::Ident) -> TokenKind {
+        Ident(ident.name, ident.is_raw_guess())
    }

    crate fn is_like_plus(&self) -> bool {
@@ -247,12 +253,14 @@ impl Token {
            _ => false,
        }
    }
+}

+impl Token {
    /// Returns `true` if the token can appear at the start of an expression.
    crate fn can_begin_expr(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw) =>
-                ident_can_begin_expr(ident, is_raw), // value name or keyword
+        match self.kind {
+            Ident(name, is_raw) =>
+                ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
            OpenDelim(..) | // tuple, array or block
            Literal(..)   | // literal
            Not           | // operator not
@@ -282,9 +290,9 @@ impl Token {
    /// Returns `true` if the token can appear at the start of a type.
    crate fn can_begin_type(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw) =>
-                ident_can_begin_type(ident, is_raw), // type name or keyword
+        match self.kind {
+            Ident(name, is_raw) =>
+                ident_can_begin_type(name, self.span, is_raw), // type name or keyword
            OpenDelim(Paren)   | // tuple
            OpenDelim(Bracket) | // array
            Not                | // never
@@ -302,7 +310,9 @@ impl Token {
            _ => false,
        }
    }
+}

+impl TokenKind {
    /// Returns `true` if the token can appear at the start of a const param.
    pub fn can_begin_const_arg(&self) -> bool {
        match self {
@@ -316,14 +326,18 @@ impl Token {
            _ => self.can_begin_literal_or_bool(),
        }
    }
+}

+impl Token {
    /// Returns `true` if the token can appear at the start of a generic bound.
    crate fn can_begin_bound(&self) -> bool {
        self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
        self == &Question || self == &OpenDelim(Paren)
    }
+}

-    pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Token {
+impl TokenKind {
+    pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
        Literal(Lit::new(kind, symbol, suffix))
    }

@@ -348,8 +362,8 @@ impl Token {
        match *self {
            Literal(..) => true,
            BinOp(Minus) => true,
-            Ident(ident, false) if ident.name == kw::True => true,
-            Ident(ident, false) if ident.name == kw::False => true,
+            Ident(name, false) if name == kw::True => true,
+            Ident(name, false) if name == kw::False => true,
            Interpolated(ref nt) => match **nt {
                NtLiteral(..) => true,
                _ => false,
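The `static_assert_size!(TokenKind, 16)` line above keeps the enum from growing silently; that macro is internal to rustc_data_structures, but the same guard can be written with plain `const` assertions on stable Rust. A sketch with illustrative byte limits, not the real ones:

```rust
// Placeholder types; the real TokenKind stores interned Symbols and Lrc
// pointers, which is what makes its 16-byte budget meaningful.
#[derive(Clone, Debug)]
enum TokenKind {
    Eq,
    Lt,
    Ident(u32 /* interned symbol id */, bool /* is_raw */),
}

#[derive(Clone, Debug)]
struct Span {
    lo: u32,
    hi: u32,
}

#[derive(Clone, Debug)]
struct Token {
    kind: TokenKind,
    span: Span,
}

// Compilation fails if either type unintentionally gets bigger.
const _: () = assert!(std::mem::size_of::<TokenKind>() <= 16);
const _: () = assert!(std::mem::size_of::<Token>() <= 24);

fn main() {
    println!(
        "TokenKind: {} bytes, Token: {} bytes",
        std::mem::size_of::<TokenKind>(),
        std::mem::size_of::<Token>()
    );
}
```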
@@ -357,11 +371,13 @@ impl Token {
            _ => false,
        }
    }
+}

+impl Token {
    /// Returns an identifier if this token is an identifier.
    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
-        match *self {
-            Ident(ident, is_raw) => Some((ident, is_raw)),
+        match self.kind {
+            Ident(name, is_raw) => Some((ast::Ident::new(name, self.span), is_raw)),
            Interpolated(ref nt) => match **nt {
                NtIdent(ident, is_raw) => Some((ident, is_raw)),
                _ => None,
@@ -369,10 +385,11 @@ impl Token {
            _ => None,
        }
    }
+
    /// Returns a lifetime identifier if this token is a lifetime.
    pub fn lifetime(&self) -> Option<ast::Ident> {
-        match *self {
-            Lifetime(ident) => Some(ident),
+        match self.kind {
+            Lifetime(name) => Some(ast::Ident::new(name, self.span)),
            Interpolated(ref nt) => match **nt {
                NtLifetime(ident) => Some(ident),
                _ => None,
@@ -380,22 +397,44 @@ impl Token {
            _ => None,
        }
    }
+}
+
+impl TokenKind {
+    /// Returns an identifier name if this token is an identifier.
+    pub fn ident_name(&self) -> Option<(ast::Name, /* is_raw */ bool)> {
+        match *self {
+            Ident(name, is_raw) => Some((name, is_raw)),
+            Interpolated(ref nt) => match **nt {
+                NtIdent(ident, is_raw) => Some((ident.name, is_raw)),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+    /// Returns a lifetime name if this token is a lifetime.
+    pub fn lifetime_name(&self) -> Option<ast::Name> {
+        match *self {
+            Lifetime(name) => Some(name),
+            Interpolated(ref nt) => match **nt {
+                NtLifetime(ident) => Some(ident.name),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
    /// Returns `true` if the token is an identifier.
    pub fn is_ident(&self) -> bool {
-        self.ident().is_some()
+        self.ident_name().is_some()
    }
    /// Returns `true` if the token is a lifetime.
    crate fn is_lifetime(&self) -> bool {
-        self.lifetime().is_some()
+        self.lifetime_name().is_some()
    }

    /// Returns `true` if the token is a identifier whose name is the given
    /// string slice.
    crate fn is_ident_named(&self, name: Symbol) -> bool {
-        match self.ident() {
-            Some((ident, _)) => ident.name == name,
-            None => false
-        }
+        self.ident_name().map_or(false, |(ident_name, _)| ident_name == name)
    }

    /// Returns `true` if the token is an interpolated path.
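With spans no longer stored inside `Ident`/`Lifetime` tokens, `Token::ident` above rebuilds the full `ast::Ident` on demand from the interned name plus the token's own span, keeping the identifier's span tied to the token's. A sketch of that reconstruction with placeholder types:

```rust
// Simplified stand-ins; the real code interns names as Symbols.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
}

#[derive(Clone, Debug, PartialEq)]
struct Ident {
    name: String,
    span: Span,
}

#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Ident(String, /* is_raw */ bool),
    Comma,
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: Span,
}

impl Token {
    /// Returns an identifier if this token is one, with the span taken
    /// from the token itself, so the two cannot go out of sync.
    fn ident(&self) -> Option<(Ident, bool)> {
        match &self.kind {
            TokenKind::Ident(name, is_raw) => Some((
                Ident { name: name.clone(), span: self.span },
                *is_raw,
            )),
            _ => None,
        }
    }
}

fn main() {
    let tok = Token {
        kind: TokenKind::Ident("main".into(), false),
        span: Span { lo: 10, hi: 14 },
    };
    let (ident, is_raw) = tok.ident().unwrap();
    assert_eq!(ident.span, tok.span);
    println!("{:?} (raw: {})", ident, is_raw);
}
```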
@@ -417,24 +456,30 @@ impl Token {
    crate fn is_qpath_start(&self) -> bool {
        self == &Lt || self == &BinOp(Shl)
    }
+}

+impl Token {
    crate fn is_path_start(&self) -> bool {
        self == &ModSep || self.is_qpath_start() || self.is_path() ||
        self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
    }
+}

+impl TokenKind {
    /// Returns `true` if the token is a given keyword, `kw`.
    pub fn is_keyword(&self, kw: Symbol) -> bool {
-        self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false)
+        self.ident_name().map(|(name, is_raw)| name == kw && !is_raw).unwrap_or(false)
    }

    pub fn is_path_segment_keyword(&self) -> bool {
-        match self.ident() {
-            Some((id, false)) => id.is_path_segment_keyword(),
+        match self.ident_name() {
+            Some((name, false)) => name.is_path_segment_keyword(),
            _ => false,
        }
    }
+}

+impl Token {
    // Returns true for reserved identifiers used internally for elided lifetimes,
    // unnamed method parameters, crate root module, error recovery etc.
    pub fn is_special_ident(&self) -> bool {
@@ -467,8 +512,10 @@ impl Token {
            _ => false,
        }
    }
+}

-    crate fn glue(self, joint: Token) -> Option<Token> {
+impl TokenKind {
+    crate fn glue(self, joint: TokenKind) -> Option<TokenKind> {
        Some(match self {
            Eq => match joint {
                Eq => EqEq,
@@ -514,13 +561,7 @@ impl Token {
                _ => return None,
            },
            SingleQuote => match joint {
-                Ident(ident, false) => {
-                    let name = Symbol::intern(&format!("'{}", ident));
-                    Lifetime(symbol::Ident {
-                        name,
-                        span: ident.span,
-                    })
-                }
+                Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))),
                _ => return None,
            },

@@ -534,7 +575,7 @@ impl Token {

    /// Returns tokens that are likely to be typed accidentally instead of the current token.
    /// Enables better error recovery when the wrong token is found.
-    crate fn similar_tokens(&self) -> Option<Vec<Token>> {
+    crate fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
        match *self {
            Comma => Some(vec![Dot, Lt, Semi]),
            Semi => Some(vec![Colon, Comma]),
@@ -544,7 +585,7 @@ impl Token {

    // See comments in `Nonterminal::to_tokenstream` for why we care about
    // *probably* equal here rather than actual equality
-    crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
+    crate fn probably_equal_for_proc_macro(&self, other: &TokenKind) -> bool {
        if mem::discriminant(self) != mem::discriminant(other) {
            return false
        }
@@ -590,10 +631,10 @@ impl Token {

            (&Literal(a), &Literal(b)) => a == b,

-            (&Lifetime(a), &Lifetime(b)) => a.name == b.name,
-            (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
-                                                       a.name == kw::DollarCrate ||
-                                                       c.name == kw::DollarCrate),
+            (&Lifetime(a), &Lifetime(b)) => a == b,
+            (&Ident(a, b), &Ident(c, d)) => b == d && (a == c ||
+                                                       a == kw::DollarCrate ||
+                                                       c == kw::DollarCrate),

            (&Interpolated(_), &Interpolated(_)) => false,

@@ -602,6 +643,36 @@ impl Token {
    }
}

+impl Token {
+    crate fn new(kind: TokenKind, span: Span) -> Self {
+        Token { kind, span }
+    }
+
+    /// Some token that will be thrown away later.
+    crate fn dummy() -> Self {
+        Token::new(TokenKind::Whitespace, DUMMY_SP)
+    }
+
+    /// Return this token by value and leave a dummy token in its place.
+    crate fn take(&mut self) -> Self {
+        mem::replace(self, Token::dummy())
+    }
+}
+
+impl PartialEq<TokenKind> for Token {
+    fn eq(&self, rhs: &TokenKind) -> bool {
+        self.kind == *rhs
+    }
+}
+
+// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
+impl Deref for Token {
+    type Target = TokenKind;
+    fn deref(&self) -> &Self::Target {
+        &self.kind
+    }
+}
+
#[derive(Clone, RustcEncodable, RustcDecodable)]
/// For interpolation during macro expansion.
pub enum Nonterminal {
@@ -691,12 +762,10 @@ impl Nonterminal {
                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
            }
            Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Token::Ident(ident, is_raw);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
            }
            Nonterminal::NtLifetime(ident) => {
-                let token = Token::Lifetime(ident);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
            }
            Nonterminal::NtTT(ref tt) => {
                Some(tt.clone().into())
@@ -743,7 +812,7 @@ impl Nonterminal {
    }
}

-crate fn is_op(tok: &Token) -> bool {
+crate fn is_op(tok: &TokenKind) -> bool {
    match *tok {
        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
        Ident(..) | Lifetime(..) | Interpolated(..) |
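The two trailing impls above are the migration shims: `PartialEq<TokenKind> for Token` keeps comparisons like `self.token == token::Semi` (seen in the parser.rs hunks) compiling, and the `Deref` impl (explicitly marked as a temporary FIXME) lets `TokenKind` methods be called through a `Token`. A sketch of both shims working together, with placeholder types:

```rust
use std::ops::Deref;

#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Semi,
    Comma,
}

impl TokenKind {
    fn is_semi(&self) -> bool {
        *self == TokenKind::Semi
    }
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: (u32, u32),
}

// Shim 1: a Token compares equal to a bare TokenKind by ignoring the span.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, rhs: &TokenKind) -> bool {
        self.kind == *rhs
    }
}

// Shim 2 (transitional): TokenKind methods remain callable through Token.
impl Deref for Token {
    type Target = TokenKind;
    fn deref(&self) -> &Self::Target {
        &self.kind
    }
}

fn main() {
    let token = Token { kind: TokenKind::Semi, span: (0, 1) };
    assert!(token == TokenKind::Semi); // via PartialEq<TokenKind>
    assert!(token.is_semi());          // via Deref to TokenKind
    assert!(token != Token { kind: TokenKind::Comma, span: (0, 1) });
    println!("both shims work at {:?}", token.span);
}
```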
@@ -781,8 +850,8 @@ fn prepend_attrs(sess: &ParseSess,
        // For simple paths, push the identifier directly
        if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
            let ident = attr.path.segments[0].ident;
-            let token = Ident(ident, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::Token(ident.span, token));
+            let token = Ident(ident.name, ident.as_str().starts_with("r#"));
+            brackets.push(tokenstream::TokenTree::token(token, ident.span));

        // ... and for more complicated paths, fall back to a reparse hack that
        // should eventually be removed.
@@ -796,7 +865,7 @@ fn prepend_attrs(sess: &ParseSess,
        // The span we list here for `#` and for `[ ... ]` are both wrong in
        // that it encompasses more than each token, but it hopefully is "good
        // enough" for now at least.
-        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(Pound, attr.span));
        let delim_span = DelimSpan::from_single(attr.span);
        builder.push(tokenstream::TokenTree::Delimited(
            delim_span, DelimToken::Bracket, brackets.build().into()));
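`prepend_attrs` above switches from the old two-field `TokenTree::Token(span, kind)` form to the new `TokenTree::token(kind, span)` helper, which wraps the pair into a single `Token` value. A sketch of that convenience constructor, again with simplified stand-ins rather than the real tokenstream types:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
}

#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Pound,
    Ident(String),
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: Span,
}

#[derive(Clone, Debug, PartialEq)]
enum TokenTree {
    Token(Token),
    // Delimited(...) omitted for brevity.
}

impl TokenTree {
    /// The convenience constructor this PR introduces: build a leaf tree
    /// from a kind and a span in one call.
    fn token(kind: TokenKind, span: Span) -> TokenTree {
        TokenTree::Token(Token { kind, span })
    }
}

fn main() {
    let span = Span { lo: 0, hi: 1 };
    // Before: TokenTree::Token(span, kind); after: one nested Token value.
    let tree = TokenTree::token(TokenKind::Pound, span);
    println!("{:?}", tree);
}
```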
