| field     | value                                                         | date                      |
|-----------|---------------------------------------------------------------|---------------------------|
| author    | Vadim Petrochenkov <vadim.petrochenkov@gmail.com>             | 2019-06-04 20:42:43 +0300 |
| committer | Vadim Petrochenkov <vadim.petrochenkov@gmail.com>             | 2019-06-06 14:03:15 +0300 |
| commit    | e0127dbf8135b766a332ce21c4eee48998b59bef (patch)              |                           |
| tree      | 4a30906f1c8058e13fd426a56967e7cba9408bf7 /src/libsyntax/parse |                           |
| parent    | a3425edb46dfcc7031068b8bdda868e5a3b16ae1 (diff)               |                           |
syntax: Use `Token` in `TokenTree::Token`
Diffstat (limited to 'src/libsyntax/parse')
| mode       | file                                    | lines changed |
|------------|-----------------------------------------|---------------|
| -rw-r--r-- | src/libsyntax/parse/attr.rs             | 2             |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs        | 40            |
| -rw-r--r-- | src/libsyntax/parse/lexer/tokentrees.rs | 2             |
| -rw-r--r-- | src/libsyntax/parse/literal.rs          | 2             |
| -rw-r--r-- | src/libsyntax/parse/mod.rs              | 31            |
| -rw-r--r-- | src/libsyntax/parse/parser.rs           | 20            |
| -rw-r--r-- | src/libsyntax/parse/token.rs            | 21            |
7 files changed, 62 insertions, 56 deletions
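
At its core, the patch changes the payload of `TokenTree::Token` from a `(Span, TokenKind)` pair to a single `Token` that bundles both, and routes the old call-site shape through a `TokenTree::token(span, kind)` constructor. The `TokenTree` and `Token` definitions live in `src/libsyntax/tokenstream.rs` and `src/libsyntax/parse/token.rs`, mostly outside this diffstat, so the following is only a minimal self-contained sketch of the shape implied by the call sites below, with reduced stand-ins for `Span` and `TokenKind`:

```rust
// Reduced stand-ins; the real types carry interned symbols, a Delimited
// variant, hygiene information, and more.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }

#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind { Eq, Semi }

// `Token` now bundles a kind with its span.
#[derive(Clone, Copy, Debug)]
struct Token { kind: TokenKind, span: Span }

enum TokenTree {
    // Before this patch the variant was `Token(Span, TokenKind)`.
    Token(Token),
}

impl TokenTree {
    // The `TokenTree::token` constructor that call sites switch to:
    fn token(span: Span, kind: TokenKind) -> TokenTree {
        TokenTree::Token(Token { kind, span })
    }
}

fn main() {
    let sp = Span { lo: 0, hi: 1 };
    // Old call sites spelled `TokenTree::Token(sp, token::Eq)`; now:
    let tt = TokenTree::token(sp, TokenKind::Eq);
    // Matching goes through a struct pattern, ignoring the span with `..`:
    match tt {
        TokenTree::Token(Token { kind: TokenKind::Eq, .. }) => println!("an `=`"),
        TokenTree::Token(token) => println!("{:?} at {:?}", token.kind, token.span),
    }
    // Single-variant enum, so the pattern is irrefutable here:
    let TokenTree::Token(token) = TokenTree::token(sp, TokenKind::Semi);
    println!("{:?} spans {}..{}", token.kind, token.span.lo, token.span.hi);
}
```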
```diff
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index e99a86e807f..9b78b56041f 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -157,7 +157,7 @@ impl<'a> Parser<'a> {
                 self.check(&token::OpenDelim(DelimToken::Brace)) {
             self.parse_token_tree().into()
         } else if self.eat(&token::Eq) {
-            let eq = TokenTree::Token(self.prev_span, token::Eq);
+            let eq = TokenTree::token(self.prev_span, token::Eq);
             let mut is_interpolated_expr = false;
             if let token::Interpolated(nt) = &self.token {
                 if let token::NtExpr(..) = **nt {
```

```diff
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 32d5b16dd71..225db0164fe 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1596,8 +1596,8 @@ mod tests {
                               "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                   .to_string());
             let id = Ident::from_str("fn");
-            assert_eq!(string_reader.next_token().kind, token::Comment);
-            assert_eq!(string_reader.next_token().kind, token::Whitespace);
+            assert_eq!(string_reader.next_token(), token::Comment);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             let tok1 = string_reader.next_token();
             let tok2 = Token {
                 kind: token::Ident(id, false),
@@ -1605,7 +1605,7 @@ mod tests {
             };
             assert_eq!(tok1.kind, tok2.kind);
             assert_eq!(tok1.span, tok2.span);
-            assert_eq!(string_reader.next_token().kind, token::Whitespace);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             // the 'main' id is already read:
             assert_eq!(string_reader.pos.clone(), BytePos(28));
             // read another token:
@@ -1625,7 +1625,7 @@ mod tests {
     // of tokens (stop checking after exhausting the expected vec)
     fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
         for expected_tok in &expected {
-            assert_eq!(&string_reader.next_token().kind, expected_tok);
+            assert_eq!(&string_reader.next_token(), expected_tok);
         }
     }
 
@@ -1683,7 +1683,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
                        mk_lit(token::Char, "a", None));
         })
     }
@@ -1693,7 +1693,7 @@ mod tests {
         with_default_globals(|| {
            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
            let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
                        mk_lit(token::Char, " ", None));
         })
     }
@@ -1703,7 +1703,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
                        mk_lit(token::Char, "\\n", None));
         })
     }
@@ -1713,7 +1713,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
                        token::Lifetime(Ident::from_str("'abc")));
         })
     }
@@ -1723,7 +1723,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
         })
     }
@@ -1735,10 +1735,10 @@ mod tests {
             let sh = mk_sess(sm.clone());
             macro_rules! test {
                 ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().kind,
+                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
                     // with a whitespace separator:
-                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().kind,
+                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, None));
                 }}
             }
@@ -1753,11 +1753,11 @@ mod tests {
             test!("1.0", Float, "1.0");
             test!("1.0e10", Float, "1.0e10");
 
-            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
                        mk_lit(token::Integer, "2", Some("us")));
-            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "raw", Some("suffix")));
-            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
         })
     }
@@ -1775,11 +1775,8 @@ mod tests {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
-            match lexer.next_token().kind {
-                token::Comment => {}
-                _ => panic!("expected a comment!"),
-            }
-            assert_eq!(lexer.next_token().kind, mk_lit(token::Char, "a", None));
+            assert_eq!(lexer.next_token(), token::Comment);
+            assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
         })
     }
 
@@ -1792,9 +1789,8 @@ mod tests {
             let comment = lexer.next_token();
             assert_eq!(comment.kind, token::Comment);
             assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
-            assert_eq!(lexer.next_token().kind, token::Whitespace);
-            assert_eq!(lexer.next_token().kind,
-                       token::DocComment(Symbol::intern("/// test")));
+            assert_eq!(lexer.next_token(), token::Whitespace);
+            assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
         })
     }
 }
```
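The `.kind` projections dropped from these lexer tests work because a `Token` now compares directly against a `TokenKind`: an `impl PartialEq<TokenKind> for Token` already exists (it shows up as hunk context in the token.rs changes at the end of this diff). Its body is not part of the diff, so this self-contained sketch assumes it compares kinds and ignores spans:

```rust
// Stand-in types; the real Token also carries a span, which (by assumption
// here) plays no part in the comparison.
#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind { Comment, Whitespace }

#[derive(Clone, Copy, Debug)]
struct Token { kind: TokenKind }

impl PartialEq<TokenKind> for Token {
    fn eq(&self, rhs: &TokenKind) -> bool {
        self.kind == *rhs // assumed: kinds only, spans ignored
    }
}

fn main() {
    let tok = Token { kind: TokenKind::Comment };
    // What the rewritten assertions rely on: no `.kind` projection needed.
    assert_eq!(tok, TokenKind::Comment);
    assert!(tok != TokenKind::Whitespace);
}
```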
```diff
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 767d37016da..abff7177abd 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -203,7 +203,7 @@ impl<'a> TokenTreesReader<'a> {
                 Err(err)
             },
             _ => {
-                let tt = TokenTree::Token(self.span, self.token.clone());
+                let tt = TokenTree::token(self.span, self.token.clone());
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
```
```diff
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index 945475ff981..4b8ef20180f 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -261,7 +261,7 @@ impl Lit {
             token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
             _ => token::Literal(self.token),
         };
-        TokenTree::Token(self.span, token).into()
+        TokenTree::token(self.span, token).into()
     }
 }
```

```diff
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 7f8b96508bd..398b4b1da17 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -385,6 +385,7 @@ mod tests {
     use crate::ast::{self, Ident, PatKind};
     use crate::attr::first_attr_value_str_by_name;
     use crate::ptr::P;
+    use crate::parse::token::Token;
     use crate::print::pprust::item_to_string;
     use crate::tokenstream::{DelimSpan, TokenTree};
     use crate::util::parser_testing::string_to_stream;
@@ -426,9 +427,9 @@ mod tests {
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
                 4,
-                Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
-                Some(&TokenTree::Token(_, token::Not)),
-                Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
+                Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })),
+                Some(&TokenTree::Token(Token { kind: token::Not, .. })),
+                Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                 Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
             )
             if name_macro_rules.name == sym::macro_rules
@@ -438,7 +439,7 @@ mod tests {
                 (
                     3,
                     Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
-                    Some(&TokenTree::Token(_, token::FatArrow)),
+                    Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })),
                     Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                 )
                 if macro_delim == token::Paren => {
@@ -446,8 +447,8 @@ mod tests {
                     match (tts.len(), tts.get(0), tts.get(1)) {
                         (
                             2,
-                            Some(&TokenTree::Token(_, token::Dollar)),
-                            Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                            Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                            Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
                         )
                         if first_delim == token::Paren && ident.name.as_str() == "a" => {},
                         _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@@ -456,8 +457,8 @@ mod tests {
                     match (tts.len(), tts.get(0), tts.get(1)) {
                         (
                             2,
-                            Some(&TokenTree::Token(_, token::Dollar)),
-                            Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                            Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                            Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
                         )
                         if second_delim == token::Paren && ident.name.as_str() == "a" => {},
                         _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
@@ -477,16 +478,16 @@ mod tests {
         let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
         let expected = TokenStream::new(vec![
-            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
-            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+            TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
+            TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
             TokenTree::Delimited(
                 DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                 token::DelimToken::Paren,
                 TokenStream::new(vec![
-                    TokenTree::Token(sp(6, 7),
+                    TokenTree::token(sp(6, 7),
                                      token::Ident(Ident::from_str("b"), false)).into(),
-                    TokenTree::Token(sp(8, 9), token::Colon).into(),
-                    TokenTree::Token(sp(10, 13),
+                    TokenTree::token(sp(8, 9), token::Colon).into(),
+                    TokenTree::token(sp(10, 13),
                                      token::Ident(Ident::from_str("i32"), false)).into(),
                 ]).into(),
             ).into(),
@@ -494,9 +495,9 @@ mod tests {
                 DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                 token::DelimToken::Brace,
                 TokenStream::new(vec![
-                    TokenTree::Token(sp(17, 18),
+                    TokenTree::token(sp(17, 18),
                                      token::Ident(Ident::from_str("b"), false)).into(),
-                    TokenTree::Token(sp(18, 19), token::Semi).into(),
+                    TokenTree::token(sp(18, 19), token::Semi).into(),
                 ]).into(),
             ).into()
         ]);
```
```diff
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 3b7d4e14dbb..eda67b3a93d 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -318,7 +318,7 @@ impl TokenCursor {
             }
 
             match tree {
-                TokenTree::Token(span, kind) => return Token { kind, span },
+                TokenTree::Token(token) => return token,
                 TokenTree::Delimited(sp, delim, tts) => {
                     let frame = TokenCursorFrame::new(sp, delim, &tts);
                     self.stack.push(mem::replace(&mut self.frame, frame));
@@ -353,9 +353,9 @@ impl TokenCursor {
             delim_span,
             token::Bracket,
             [
-                TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
-                TokenTree::Token(sp, token::Eq),
-                TokenTree::Token(sp, token::TokenKind::lit(
+                TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+                TokenTree::token(sp, token::Eq),
+                TokenTree::token(sp, token::TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
                 )),
             ]
@@ -366,10 +366,10 @@ impl TokenCursor {
             delim_span,
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+                [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::Token(sp, token::Pound), body]
+                [TokenTree::token(sp, token::Pound), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
         )));
@@ -1052,7 +1052,7 @@ impl<'a> Parser<'a> {
 
         f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
             Some(tree) => match tree {
-                TokenTree::Token(_, tok) => tok,
+                TokenTree::Token(token) => token.kind,
                 TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
             },
             None => token::CloseDelim(self.token_cursor.frame.delim),
@@ -1065,7 +1065,7 @@ impl<'a> Parser<'a> {
         }
 
         match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
-            Some(TokenTree::Token(span, _)) => span,
+            Some(TokenTree::Token(token)) => token.span,
             Some(TokenTree::Delimited(span, ..)) => span.entire(),
             None => self.look_ahead_span(dist - 1),
         }
@@ -2675,7 +2675,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
                 self.bump();
-                TokenTree::Token(span, token)
+                TokenTree::token(span, token)
             }
         }
     }
@@ -4344,7 +4344,7 @@ impl<'a> Parser<'a> {
             };
             TokenStream::new(vec![
                 args.into(),
-                TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+                TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
                 body.into(),
             ])
         } else {
```
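The `TokenCursor` hunks above show the structural payoff: with the tree carrying a complete `Token`, `next` can hand the token back whole instead of reassembling `Token { kind, span }` from a pair, and the lookahead helpers simply project `token.kind` or `token.span`. A reduced sketch of that flow (stand-in types; the real cursor walks a stack of delimited frames):

```rust
// Stand-in types only; not the real cursor.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32, u32);

#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind { Semi }

#[derive(Clone, Copy, Debug)]
struct Token { kind: TokenKind, span: Span }

enum TokenTree { Token(Token) }

fn next_token(tree: TokenTree) -> Token {
    match tree {
        // Before: `TokenTree::Token(span, kind) => return Token { kind, span }`,
        // i.e. the cursor had to assemble a Token out of the pair.
        TokenTree::Token(token) => token,
    }
}

fn main() {
    let tt = TokenTree::Token(Token { kind: TokenKind::Semi, span: Span(3, 4) });
    let tok = next_token(tt);
    // Lookahead-style projections, as in `look_ahead` / `look_ahead_span`:
    assert_eq!(tok.kind, TokenKind::Semi);
    assert_eq!(tok.span, Span(3, 4));
}
```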
```diff
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 3679e4050ff..a06bf9fae7c 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -18,6 +18,7 @@ use log::info;
 
 use std::fmt;
 use std::mem;
+use std::ops::Deref;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
 use rustc_data_structures::sync::Lrc;
@@ -165,7 +166,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
     ].contains(&ident.name)
 }
 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     Eq,
@@ -235,7 +236,7 @@ pub enum TokenKind {
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(TokenKind, 16);
 
-#[derive(Clone, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,
@@ -614,6 +615,14 @@ impl PartialEq<TokenKind> for Token {
     }
 }
 
+// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
+impl Deref for Token {
+    type Target = TokenKind;
+    fn deref(&self) -> &Self::Target {
+        &self.kind
+    }
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable)]
 /// For interpolation during macro expansion.
 pub enum Nonterminal {
@@ -704,11 +713,11 @@ impl Nonterminal {
             }
             Nonterminal::NtIdent(ident, is_raw) => {
                 let token = Ident(ident, is_raw);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, token).into())
             }
             Nonterminal::NtLifetime(ident) => {
                 let token = Lifetime(ident);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, token).into())
             }
             Nonterminal::NtTT(ref tt) => {
                 Some(tt.clone().into())
@@ -794,7 +803,7 @@ fn prepend_attrs(sess: &ParseSess,
     if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
         let ident = attr.path.segments[0].ident;
         let token = Ident(ident, ident.as_str().starts_with("r#"));
-        brackets.push(tokenstream::TokenTree::Token(ident.span, token));
+        brackets.push(tokenstream::TokenTree::token(ident.span, token));
 
     // ... and for more complicated paths, fall back to a reparse hack that
     // should eventually be removed.
@@ -808,7 +817,7 @@ fn prepend_attrs(sess: &ParseSess,
         // The span we list here for `#` and for `[ ... ]` are both wrong in
         // that it encompasses more than each token, but it hopefully is "good
         // enough" for now at least.
-        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(attr.span, Pound));
         let delim_span = DelimSpan::from_single(attr.span);
         builder.push(tokenstream::TokenTree::Delimited(
             delim_span, DelimToken::Bracket, brackets.build().into()));
```
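
The new `Deref` impl is explicitly a stopgap (see the FIXME in the hunk above): it keeps the many helper methods still defined on `TokenKind` callable through a `Token` until they are moved over. A self-contained sketch of the mechanism, where `is_eq` is a hypothetical stand-in for those helpers:

```rust
use std::ops::Deref;

#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind { Eq, Semi }

impl TokenKind {
    // Hypothetical helper standing in for the methods that still live on
    // TokenKind in this commit (e.g. predicates used by the parser).
    fn is_eq(&self) -> bool {
        *self == TokenKind::Eq
    }
}

#[derive(Clone, Copy, Debug)]
struct Token { kind: TokenKind }

// Mirrors the impl added in token.rs above (span omitted in this stand-in).
impl Deref for Token {
    type Target = TokenKind;
    fn deref(&self) -> &Self::Target {
        &self.kind
    }
}

fn main() {
    // Auto-deref lets TokenKind methods be called directly on a Token:
    assert!(Token { kind: TokenKind::Eq }.is_eq());
    assert!(!Token { kind: TokenKind::Semi }.is_eq());
}
```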
