diff options
| author | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2019-06-04 20:42:43 +0300 |
|---|---|---|
| committer | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2019-06-06 14:03:15 +0300 |
| commit | e0127dbf8135b766a332ce21c4eee48998b59bef (patch) | |
| tree | 4a30906f1c8058e13fd426a56967e7cba9408bf7 /src/libsyntax/ext | |
| parent | a3425edb46dfcc7031068b8bdda868e5a3b16ae1 (diff) | |
| download | rust-e0127dbf8135b766a332ce21c4eee48998b59bef.tar.gz rust-e0127dbf8135b766a332ce21c4eee48998b59bef.zip | |
syntax: Use `Token` in `TokenTree::Token`
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/base.rs | 10 | ||||
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 7 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 32 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/quoted.rs | 80 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 12 |
6 files changed, 74 insertions, 69 deletions
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 4b5b9ff7bbe..0c2ab672407 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -265,10 +265,12 @@ impl<F> TTMacroExpander for F impl MutVisitor for AvoidInterpolatedIdents { fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) { - if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt { - if let token::NtIdent(ident, is_raw) = **nt { - *tt = tokenstream::TokenTree::Token(ident.span, - token::Ident(ident, is_raw)); + if let tokenstream::TokenTree::Token(token) = tt { + if let token::Interpolated(nt) = &token.kind { + if let token::NtIdent(ident, is_raw) = **nt { + *tt = tokenstream::TokenTree::token(ident.span, + token::Ident(ident, is_raw)); + } } } mut_visit::noop_visit_tt(tt, self) diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 7b158b65d15..4396b9be9bb 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -585,7 +585,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } AttrProcMacro(ref mac, ..) 
=> { self.gate_proc_macro_attr_item(attr.span, &item); - let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item { + let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item { Annotatable::Item(item) => token::NtItem(item), Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()), Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()), diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index c22952ed750..6acdffedd6b 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -78,7 +78,7 @@ use crate::ast::Ident; use crate::ext::tt::quoted::{self, TokenTree}; use crate::parse::{Directory, ParseSess}; use crate::parse::parser::{Parser, PathStyle}; -use crate::parse::token::{self, DocComment, Nonterminal, TokenKind}; +use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind}; use crate::print::pprust; use crate::symbol::{kw, sym, Symbol}; use crate::tokenstream::{DelimSpan, TokenStream}; @@ -609,7 +609,8 @@ fn inner_parse_loop<'root, 'tt>( // // At the beginning of the loop, if we reach the end of the delimited submatcher, // we pop the stack to backtrack out of the descent. - seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => { + seq @ TokenTree::Delimited(..) | + seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => { let lower_elts = mem::replace(&mut item.top_elts, Tt(seq)); let idx = item.idx; item.stack.push(MatcherTtFrame { @@ -621,7 +622,7 @@ fn inner_parse_loop<'root, 'tt>( } // We just matched a normal token. We can just advance the parser. 
- TokenTree::Token(_, ref t) if token_name_eq(t, token) => { + TokenTree::Token(t) if token_name_eq(&t, token) => { item.idx += 1; next_items.push(item); } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 9d3ea4d8645..703ad0053a0 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -11,7 +11,7 @@ use crate::ext::tt::transcribe::transcribe; use crate::feature_gate::Features; use crate::parse::{Directory, ParseSess}; use crate::parse::parser::Parser; -use crate::parse::token::{self, NtTT}; +use crate::parse::token::{self, Token, NtTT}; use crate::parse::token::TokenKind::*; use crate::symbol::{Symbol, kw, sym}; use crate::tokenstream::{DelimSpan, TokenStream, TokenTree}; @@ -270,7 +270,7 @@ pub fn compile( quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition { tts: vec![ quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), - quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), + quoted::TokenTree::token(DUMMY_SP, token::FatArrow), quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")), ], separator: Some(if body.legacy { token::Semi } else { token::Comma }), @@ -279,7 +279,7 @@ pub fn compile( })), // to phase into semicolon-termination instead of semicolon-separation quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition { - tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], + tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)], separator: None, op: quoted::KleeneOp::ZeroOrMore, num_captures: 0 @@ -613,7 +613,7 @@ impl FirstSets { if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { - first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone())); + first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone())); } // Reverse scan: Sequence comes before `first`. 
@@ -663,7 +663,7 @@ impl FirstSets { if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { - first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone())); + first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone())); } assert!(first.maybe_empty); @@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess, let mut new; let my_suffix = if let Some(ref u) = seq_rep.separator { new = suffix_first.clone(); - new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone())); + new.add_one_maybe(TokenTree::token(sp.entire(), u.clone())); &new } else { &suffix_first @@ -1015,7 +1015,7 @@ enum IsInFollow { fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { use quoted::TokenTree; - if let TokenTree::Token(_, token::CloseDelim(_)) = *tok { + if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok { // closing a token tree can never be matched by any fragment; // iow, we always require that `(` and `)` match, etc. IsInFollow::Yes @@ -1033,8 +1033,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { }, "stmt" | "expr" => { let tokens = vec!["`=>`", "`,`", "`;`"]; - match *tok { - TokenTree::Token(_, ref tok) => match *tok { + match tok { + TokenTree::Token(token) => match token.kind { FatArrow | Comma | Semi => IsInFollow::Yes, _ => IsInFollow::No(tokens), }, @@ -1043,8 +1043,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { }, "pat" => { let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"]; - match *tok { - TokenTree::Token(_, ref tok) => match *tok { + match tok { + TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes, Ident(i, false) if i.name == kw::If || i.name == kw::In => IsInFollow::Yes, @@ -1058,8 +1058,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { "`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`", "`where`", ]; - match *tok { - TokenTree::Token(_, ref tok) => match 
*tok { + match tok { + TokenTree::Token(token) => match token.kind { OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi | @@ -1089,8 +1089,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { "vis" => { // Explicitly disallow `priv`, on the off chance it comes back. let tokens = vec!["`,`", "an ident", "a type"]; - match *tok { - TokenTree::Token(_, ref tok) => match *tok { + match tok { + TokenTree::Token(token) => match token.kind { Comma => IsInFollow::Yes, Ident(i, is_raw) if is_raw || i.name != kw::Priv => IsInFollow::Yes, @@ -1150,7 +1150,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess, fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String { match *tt { - quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok), + quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token), quoted::TokenTree::MetaVar(_, name) => format!("${}", name), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index fe0cb56b29e..9f4e35ad3d7 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -2,7 +2,8 @@ use crate::ast::NodeId; use crate::early_buffered_lints::BufferedEarlyLintId; use crate::ext::tt::macro_parser; use crate::feature_gate::Features; -use crate::parse::{token, ParseSess}; +use crate::parse::token::{self, Token, TokenKind}; +use crate::parse::ParseSess; use crate::print::pprust; use crate::tokenstream::{self, DelimSpan}; use crate::ast; @@ -39,7 +40,7 @@ impl Delimited { } else { span.with_lo(span.lo() + BytePos(self.delim.len() as u32)) }; - TokenTree::Token(open_span, self.open_token()) + TokenTree::token(open_span, self.open_token()) } /// Returns a `self::TokenTree` with a `Span` corresponding to the 
closing delimiter. @@ -49,7 +50,7 @@ impl Delimited { } else { span.with_lo(span.hi() - BytePos(self.delim.len() as u32)) }; - TokenTree::Token(close_span, self.close_token()) + TokenTree::token(close_span, self.close_token()) } } @@ -81,7 +82,7 @@ pub enum KleeneOp { /// are "first-class" token trees. Useful for parsing macros. #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum TokenTree { - Token(Span, token::TokenKind), + Token(Token), Delimited(DelimSpan, Lrc<Delimited>), /// A kleene-style repetition sequence Sequence(DelimSpan, Lrc<SequenceRepetition>), @@ -144,13 +145,17 @@ impl TokenTree { /// Retrieves the `TokenTree`'s span. pub fn span(&self) -> Span { match *self { - TokenTree::Token(sp, _) - | TokenTree::MetaVar(sp, _) - | TokenTree::MetaVarDecl(sp, _, _) => sp, - TokenTree::Delimited(sp, _) - | TokenTree::Sequence(sp, _) => sp.entire(), + TokenTree::Token(Token { span, .. }) + | TokenTree::MetaVar(span, _) + | TokenTree::MetaVarDecl(span, _, _) => span, + TokenTree::Delimited(span, _) + | TokenTree::Sequence(span, _) => span.entire(), } } + + crate fn token(span: Span, kind: TokenKind) -> TokenTree { + TokenTree::Token(Token { kind, span }) + } } /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. 
Specifically, this @@ -205,14 +210,14 @@ pub fn parse( match tree { TokenTree::MetaVar(start_sp, ident) if expect_matchers => { let span = match trees.next() { - Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { - Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { + Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() { + Some(tokenstream::TokenTree::Token(token)) => match token.ident() { Some((kind, _)) => { - let span = end_sp.with_lo(start_sp.lo()); + let span = token.span.with_lo(start_sp.lo()); result.push(TokenTree::MetaVarDecl(span, ident, kind)); continue; } - _ => end_sp, + _ => token.span, }, tree => tree .as_ref() @@ -270,7 +275,7 @@ where // Depending on what `tree` is, we could be parsing different parts of a macro match tree { // `tree` is a `$` token. Look at the next token in `trees` - tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() { + tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() { // `tree` is followed by a delimited set of token trees. This indicates the beginning // of a repetition sequence in the macro (e.g. `$(pat)*`). Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => { @@ -316,33 +321,33 @@ where // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special // metavariable that names the crate of the invocation. 
- Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => { + Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => { let (ident, is_raw) = token.ident().unwrap(); - let span = ident_span.with_lo(span.lo()); + let span = token.span.with_lo(span.lo()); if ident.name == kw::Crate && !is_raw { let ident = ast::Ident::new(kw::DollarCrate, ident.span); - TokenTree::Token(span, token::Ident(ident, is_raw)) + TokenTree::token(span, token::Ident(ident, is_raw)) } else { TokenTree::MetaVar(span, ident) } } // `tree` is followed by a random token. This is an error. - Some(tokenstream::TokenTree::Token(span, tok)) => { + Some(tokenstream::TokenTree::Token(token)) => { let msg = format!( "expected identifier, found `{}`", - pprust::token_to_string(&tok) + pprust::token_to_string(&token), ); - sess.span_diagnostic.span_err(span, &msg); - TokenTree::MetaVar(span, ast::Ident::invalid()) + sess.span_diagnostic.span_err(token.span, &msg); + TokenTree::MetaVar(token.span, ast::Ident::invalid()) } // There are no more tokens. Just return the `$` we already have. - None => TokenTree::Token(span, token::Dollar), + None => TokenTree::token(span, token::Dollar), }, // `tree` is an arbitrary token. Keep it. - tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok), + tokenstream::TokenTree::Token(token) => TokenTree::Token(token), // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to // descend into the delimited set and further parse it. 
@@ -380,17 +385,14 @@ fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> { /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp /// - Err(span) if the next token tree is not a token -fn parse_kleene_op<I>( - input: &mut I, - span: Span, -) -> Result<Result<(KleeneOp, Span), (token::TokenKind, Span)>, Span> +fn parse_kleene_op<I>(input: &mut I, span: Span) -> Result<Result<(KleeneOp, Span), Token>, Span> where I: Iterator<Item = tokenstream::TokenTree>, { match input.next() { - Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) { - Some(op) => Ok(Ok((op, span))), - None => Ok(Err((tok, span))), + Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) { + Some(op) => Ok(Ok((op, token.span))), + None => Ok(Err(token)), }, tree => Err(tree .as_ref() @@ -466,7 +468,7 @@ where assert_eq!(op, KleeneOp::ZeroOrOne); // Lookahead at #2. If it is a KleenOp, then #1 is a separator. 
- let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() { + let is_1_sep = if let Some(tokenstream::TokenTree::Token(tok2)) = input.peek() { kleene_op(tok2).is_some() } else { false @@ -504,7 +506,7 @@ where } // #2 is a random token (this is an error) :( - Ok(Err((_, _))) => op1_span, + Ok(Err(_)) => op1_span, // #2 is not even a token at all :( Err(_) => op1_span, @@ -524,7 +526,7 @@ where } // #1 is a separator followed by #2, a KleeneOp - Ok(Err((tok, span))) => match parse_kleene_op(input, span) { + Ok(Err(token)) => match parse_kleene_op(input, token.span) { // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition, // but is allowed in the 2018 edition Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => { @@ -539,10 +541,10 @@ where } // #2 is a KleeneOp :D - Ok(Ok((op, _))) => return (Some(tok), op), + Ok(Ok((op, _))) => return (Some(token.kind), op), // #2 is a random token :( - Ok(Err((_, span))) => span, + Ok(Err(token)) => token.span, // #2 is not a token at all :( Err(span) => span, @@ -580,12 +582,12 @@ where Ok(Ok((op, _))) => return (None, op), // #1 is a separator followed by #2, a KleeneOp - Ok(Err((tok, span))) => match parse_kleene_op(input, span) { + Ok(Err(token)) => match parse_kleene_op(input, token.span) { // #2 is the `?` Kleene op, which does not take a separator (error) Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => { // Error! 
sess.span_diagnostic.span_err( - span, + token.span, "the `?` macro repetition operator does not take a separator", ); @@ -594,10 +596,10 @@ where } // #2 is a KleeneOp :D - Ok(Ok((op, _))) => return (Some(tok), op), + Ok(Ok((op, _))) => return (Some(token.kind), op), // #2 is a random token :( - Ok(Err((_, span))) => span, + Ok(Err(token)) => token.span, // #2 is not a token at all :( Err(span) => span, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 1b169d7696a..1dbb0638df1 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -119,7 +119,7 @@ pub fn transcribe( Some((tt, _)) => tt.span(), None => DUMMY_SP, }; - result.push(TokenTree::Token(prev_span, sep).into()); + result.push(TokenTree::token(prev_span, sep).into()); } continue; } @@ -225,7 +225,7 @@ pub fn transcribe( result.push(tt.clone().into()); } else { sp = sp.apply_mark(cx.current_expansion.mark); - let token = TokenTree::Token(sp, token::Interpolated(nt.clone())); + let token = TokenTree::token(sp, token::Interpolated(nt.clone())); result.push(token.into()); } } else { @@ -241,8 +241,8 @@ pub fn transcribe( let ident = Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark)); sp = sp.apply_mark(cx.current_expansion.mark); - result.push(TokenTree::Token(sp, token::Dollar).into()); - result.push(TokenTree::Token(sp, token::TokenKind::from_ast_ident(ident)).into()); + result.push(TokenTree::token(sp, token::Dollar).into()); + result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into()); } } @@ -259,9 +259,9 @@ pub fn transcribe( // Nothing much to do here. Just push the token to the result, being careful to // preserve syntax context. 
- quoted::TokenTree::Token(sp, tok) => { + quoted::TokenTree::Token(token) => { let mut marker = Marker(cx.current_expansion.mark); - let mut tt = TokenTree::Token(sp, tok); + let mut tt = TokenTree::Token(token); noop_visit_tt(&mut tt, &mut marker); result.push(tt.into()); } |
