author     Vadim Petrochenkov <vadim.petrochenkov@gmail.com>   2019-06-04 20:42:43 +0300
committer  Vadim Petrochenkov <vadim.petrochenkov@gmail.com>   2019-06-06 14:03:15 +0300
commit     e0127dbf8135b766a332ce21c4eee48998b59bef (patch)
tree       4a30906f1c8058e13fd426a56967e7cba9408bf7 /src/libsyntax_ext
parent     a3425edb46dfcc7031068b8bdda868e5a3b16ae1 (diff)
syntax: Use `Token` in `TokenTree::Token`
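
For orientation before the hunks: a minimal, self-contained sketch of the shape of this change, inferred from the call sites below. The names echo libsyntax, but they are simplified stand-ins, not the real definitions in syntax::parse::token and syntax::tokenstream (which carry many more variants, fields, and derives).

// Simplified model of the refactoring, not the actual libsyntax code.

#[derive(Clone, Copy, Debug)]
struct Span(u32, u32);

#[derive(Clone, Debug)]
enum TokenKind {
    Colon,
    Ident(String),
}

// After this commit the span travels inside a full `Token` value...
#[derive(Clone, Debug)]
struct Token {
    kind: TokenKind,
    span: Span,
}

// ...and `TokenTree::Token` wraps that single value instead of a
// `(Span, TokenKind)` pair. (The `Delimited` variant is elided here.)
#[derive(Clone, Debug)]
enum TokenTree {
    Token(Token),
}

impl TokenTree {
    // Convenience constructor mirroring the `TokenTree::token(span, kind)`
    // calls introduced throughout this diff.
    fn token(span: Span, kind: TokenKind) -> TokenTree {
        TokenTree::Token(Token { kind, span })
    }
}

fn main() {
    // Construction, as in assert.rs / deriving/custom.rs / proc_macro_server.rs.
    let colon = TokenTree::token(Span(0, 1), TokenKind::Colon);
    let ident = TokenTree::token(Span(2, 5), TokenKind::Ident("foo".to_string()));

    // Matching, as in asm.rs / concat_idents.rs: a struct pattern reaches
    // into `kind` and discards the span with `..`.
    let is_colon = match &colon {
        TokenTree::Token(Token { kind: TokenKind::Colon, .. }) => true,
        _ => false,
    };
    assert!(is_colon);

    // Destructuring, as in proc_macro_server.rs: take the whole `Token`
    // apart instead of unpacking a `(span, kind)` tuple.
    let TokenTree::Token(Token { kind, span }) = ident;
    println!("{:?} at {}..{}", kind, span.0, span.1);
}

The hunks below are the mechanical consequence of that shape: construction goes through TokenTree::token(span, kind), and matches either use a struct pattern on the kind or bind the whole Token and call its methods, instead of unpacking a (Span, TokenKind) pair.
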
Diffstat (limited to 'src/libsyntax_ext')
-rw-r--r--  src/libsyntax_ext/asm.rs                  7
-rw-r--r--  src/libsyntax_ext/assert.rs               2
-rw-r--r--  src/libsyntax_ext/concat_idents.rs        6
-rw-r--r--  src/libsyntax_ext/deriving/custom.rs      2
-rw-r--r--  src/libsyntax_ext/proc_macro_server.rs   24
-rw-r--r--  src/libsyntax_ext/trace_macros.rs         4
6 files changed, 23 insertions, 22 deletions
diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs
index 4d7083c1a79..83c4c809de3 100644
--- a/src/libsyntax_ext/asm.rs
+++ b/src/libsyntax_ext/asm.rs
@@ -9,7 +9,8 @@ use errors::DiagnosticBuilder;
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::{self, token};
+use syntax::parse;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::ast::AsmDialect;
@@ -86,8 +87,8 @@ fn parse_inline_asm<'a>(
     let first_colon = tts.iter()
         .position(|tt| {
             match *tt {
-                tokenstream::TokenTree::Token(_, token::Colon) |
-                tokenstream::TokenTree::Token(_, token::ModSep) => true,
+                tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
+                tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
                 _ => false,
             }
         })
diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs
index 29dd445e751..8a297a5c9bc 100644
--- a/src/libsyntax_ext/assert.rs
+++ b/src/libsyntax_ext/assert.rs
@@ -29,7 +29,7 @@ pub fn expand_assert<'cx>(
     let panic_call = Mac_ {
         path: Path::from_ident(Ident::new(sym::panic, sp)),
         tts: custom_message.unwrap_or_else(|| {
-            TokenStream::from(TokenTree::Token(
+            TokenStream::from(TokenTree::token(
                 DUMMY_SP,
                 TokenKind::lit(token::Str, Symbol::intern(&format!(
                     "assertion failed: {}",
diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs
index 77c53f402cc..59f25af3742 100644
--- a/src/libsyntax_ext/concat_idents.rs
+++ b/src/libsyntax_ext/concat_idents.rs
@@ -3,7 +3,7 @@ use rustc_data_structures::thin_vec::ThinVec;
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax_pos::Span;
 use syntax_pos::symbol::{Symbol, sym};
@@ -30,7 +30,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                TokenTree::Token(_, token::Comma) => {}
+                TokenTree::Token(Token { kind: token::Comma, .. }) => {}
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::any(sp);
@@ -38,7 +38,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
             }
         } else {
             match *e {
-                TokenTree::Token(_, token::Ident(ident, _)) =>
+                TokenTree::Token(Token { kind: token::Ident(ident, _), .. }) =>
                     res_str.push_str(&ident.as_str()),
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");
diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs
index c1d93805a58..3deab97db88 100644
--- a/src/libsyntax_ext/deriving/custom.rs
+++ b/src/libsyntax_ext/deriving/custom.rs
@@ -69,7 +69,7 @@ impl MultiItemModifier for ProcMacroDerive {
         MarkAttrs(&self.attrs).visit_item(&item);
 
         let token = token::Interpolated(Lrc::new(token::NtItem(item)));
-        let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
+        let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();
 
         let server = proc_macro_server::Rustc::new(ecx);
         let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index 119b83b7527..26eb9e9d4fc 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -55,7 +55,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
         use syntax::parse::token::*;
 
         let joint = is_joint == Joint;
-        let (span, token) = match tree {
+        let Token { kind, span } = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
                 let delimiter = Delimiter::from_internal(delim);
                 return TokenTree::Group(Group {
@@ -64,7 +64,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                     span,
                 });
             }
-            tokenstream::TokenTree::Token(span, token) => (span, token),
+            tokenstream::TokenTree::Token(token) => token,
         };
 
         macro_rules! tt {
@@ -93,7 +93,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
             }};
         }
 
-        match token {
+        match kind {
             Eq => op!('='),
             Lt => op!('<'),
             Le => op!('<', '='),
@@ -164,7 +164,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                     TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                 ]
                 .into_iter()
-                .map(|token| tokenstream::TokenTree::Token(span, token))
+                .map(|kind| tokenstream::TokenTree::token(span, kind))
                 .collect();
                 stack.push(TokenTree::Group(Group {
                     delimiter: Delimiter::Bracket,
@@ -212,7 +212,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
                 let token = Ident(ast::Ident::new(sym, span), is_raw);
-                return tokenstream::TokenTree::Token(span, token).into();
+                return tokenstream::TokenTree::token(span, token).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },
@@ -221,8 +221,8 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, integer);
+                let a = tokenstream::TokenTree::token(span, minus);
+                let b = tokenstream::TokenTree::token(span, integer);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal {
@@ -232,16 +232,16 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, float);
+                let a = tokenstream::TokenTree::token(span, minus);
+                let b = tokenstream::TokenTree::token(span, float);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal { lit, span }) => {
-                return tokenstream::TokenTree::Token(span, Literal(lit)).into()
+                return tokenstream::TokenTree::token(span, Literal(lit)).into()
             }
         };
 
-        let token = match ch {
+        let kind = match ch {
             '=' => Eq,
             '<' => Lt,
             '>' => Gt,
@@ -267,7 +267,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
             _ => unreachable!(),
         };
 
-        let tree = tokenstream::TokenTree::Token(span, token);
+        let tree = tokenstream::TokenTree::token(span, kind);
         TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
     }
 }
diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs
index 61ef94560cc..6c74f77ff1f 100644
--- a/src/libsyntax_ext/trace_macros.rs
+++ b/src/libsyntax_ext/trace_macros.rs
@@ -17,10 +17,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
     }
 
     match (tt.len(), tt.first()) {
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => {
+        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::True) => {
             cx.set_trace_macros(true);
         }
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => {
+        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::False) => {
             cx.set_trace_macros(false);
         }
         _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),