about summary refs log tree commit diff
diff options
context:
space:
mode:
author     Vadim Petrochenkov <vadim.petrochenkov@gmail.com>    2019-06-08 22:38:23 +0300
committer  Vadim Petrochenkov <vadim.petrochenkov@gmail.com>    2019-06-08 22:38:23 +0300
commit     25b05147b3ec0a1ed9df9614910a10171b8cf211 (patch)
tree       40e348a9fd8394e15c873a56c24957131ba3a977
parent     0ca3c2f881fc4bc51bfa92f1adcd1b845b812534 (diff)
download   rust-25b05147b3ec0a1ed9df9614910a10171b8cf211.tar.gz
           rust-25b05147b3ec0a1ed9df9614910a10171b8cf211.zip
syntax: Remove `Deref` impl from `Token`
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs      12
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs        4
-rw-r--r--  src/libsyntax/ext/tt/quoted.rs            20
-rw-r--r--  src/libsyntax/parse/diagnostics.rs         2
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs    2
-rw-r--r--  src/libsyntax/parse/mod.rs                 4
-rw-r--r--  src/libsyntax/parse/parser.rs             18
-rw-r--r--  src/libsyntax/parse/token.rs              29
-rw-r--r--  src/libsyntax/print/pprust.rs             12
-rw-r--r--  src/libsyntax/tokenstream.rs               8
10 files changed, 45 insertions, 66 deletions
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 9520adb9029..4758b6a50e5 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -78,7 +78,7 @@ use crate::ast::{Ident, Name};
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
+use crate::parse::token::{self, DocComment, Nonterminal, Token};
 use crate::print::pprust;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{DelimSpan, TokenStream};
@@ -417,12 +417,12 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
 
 /// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
 /// other tokens, this is "unexpected token...".
-pub fn parse_failure_msg(tok: TokenKind) -> String {
-    match tok {
+pub fn parse_failure_msg(tok: &Token) -> String {
+    match tok.kind {
         token::Eof => "unexpected end of macro invocation".to_string(),
         _ => format!(
             "no rules expected the token `{}`",
-            pprust::token_to_string(&tok)
+            pprust::token_to_string(tok)
         ),
     }
 }
@@ -804,8 +804,8 @@ pub fn parse(
 
 /// The token is an identifier, but not `_`.
 /// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> {
-    match *token {
+fn get_macro_name(token: &Token) -> Option<(Name, bool)> {
+    match token.kind {
         token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
         _ => None,
     }
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index b7fbfd60ed7..e7da195c005 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -200,7 +200,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
 
     let (token, label) = best_failure.expect("ran no matchers");
     let span = token.span.substitute_dummy(sp);
-    let mut err = cx.struct_span_err(span, &parse_failure_msg(token.kind));
+    let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
     err.span_label(span, label);
     if let Some(sp) = def_span {
         if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
@@ -288,7 +288,7 @@ pub fn compile(
     let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
         Success(m) => m,
         Failure(token, msg) => {
-            let s = parse_failure_msg(token.kind);
+            let s = parse_failure_msg(&token);
             let sp = token.span.substitute_dummy(def.span);
             let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
             err.span_label(sp, msg);
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 8f8aa586070..707fb65bcc5 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -23,16 +23,6 @@ pub struct Delimited {
 }
 
 impl Delimited {
-    /// Returns the opening delimiter (possibly `NoDelim`).
-    pub fn open_token(&self) -> TokenKind {
-        token::OpenDelim(self.delim)
-    }
-
-    /// Returns the closing delimiter (possibly `NoDelim`).
-    pub fn close_token(&self) -> TokenKind {
-        token::CloseDelim(self.delim)
-    }
-
     /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
     pub fn open_tt(&self, span: Span) -> TokenTree {
         let open_span = if span.is_dummy() {
@@ -40,7 +30,7 @@ impl Delimited {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(self.open_token(), open_span)
+        TokenTree::token(token::OpenDelim(self.delim), open_span)
     }
 
     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -50,7 +40,7 @@ impl Delimited {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(self.close_token(), close_span)
+        TokenTree::token(token::CloseDelim(self.delim), close_span)
     }
 }
 
@@ -282,7 +272,7 @@ where
             Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
                 // Must have `(` not `{` or `[`
                 if delim != token::Paren {
-                    let tok = pprust::token_to_string(&token::OpenDelim(delim));
+                    let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
                     let msg = format!("expected `(`, found `{}`", tok);
                     sess.span_diagnostic.span_err(span.entire(), &msg);
                 }
@@ -371,8 +361,8 @@ where
 
 /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
 /// `None`.
-fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
-    match *token {
+fn kleene_op(token: &Token) -> Option<KleeneOp> {
+    match token.kind {
         token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
         token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
         token::Question => Some(KleeneOp::ZeroOrOne),
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index c4db9a9df45..9d2ac5b4b51 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -729,7 +729,7 @@ impl<'a> Parser<'a> {
         &mut self,
         t: &TokenKind,
     ) -> PResult<'a, bool /* recovered */> {
-        let token_str = pprust::token_to_string(t);
+        let token_str = pprust::token_kind_to_string(t);
         let this_token_str = self.this_token_descr();
         let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
             // Point at the end of the macro call when reaching end of macro arguments.
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index b809f99beba..99d9d40a45b 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -211,7 +211,7 @@ impl<'a> TokenTreesReader<'a> {
                 let raw = self.string_reader.peek_span_src_raw;
                 self.real_token();
                 let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
-                    && token::is_op(&self.token);
+                    && self.token.is_op();
                 Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 1d708d39a13..cde35681988 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -9,7 +9,7 @@ use crate::parse::parser::emit_unclosed_delims;
 use crate::parse::token::TokenKind;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
-use crate::print::pprust::token_to_string;
+use crate::print::pprust;
 
 use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use rustc_data_structures::sync::{Lrc, Lock};
@@ -312,7 +312,7 @@ pub fn maybe_file_to_stream(
             for unmatched in unmatched_braces {
                 let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
                     "incorrect close delimiter: `{}`",
-                    token_to_string(&token::CloseDelim(unmatched.found_delim)),
+                    pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
                 ));
                 db.span_label(unmatched.found_span, "incorrect close delimiter");
                 if let Some(sp) = unmatched.candidate_span {
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 8ad43a6c809..d9eba3bbadb 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -401,7 +401,7 @@ crate enum TokenType {
 impl TokenType {
     crate fn to_string(&self) -> String {
         match *self {
-            TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
+            TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
             TokenType::Keyword(kw) => format!("`{}`", kw),
             TokenType::Operator => "an operator".to_string(),
             TokenType::Lifetime => "lifetime".to_string(),
@@ -418,7 +418,7 @@ impl TokenType {
 ///
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
 /// that `IDENT` is not the ident of a fn trait.
-fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool {
+fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
     t == &token::ModSep || t == &token::Lt ||
     t == &token::BinOp(token::Shl)
 }
@@ -586,10 +586,10 @@ impl<'a> Parser<'a> {
         edible: &[TokenKind],
         inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
-        if edible.contains(&self.token) {
+        if edible.contains(&self.token.kind) {
             self.bump();
             Ok(false)
-        } else if inedible.contains(&self.token) {
+        } else if inedible.contains(&self.token.kind) {
             // leave it in the input
             Ok(false)
         } else if self.last_unexpected_token_span == Some(self.token.span) {
@@ -951,7 +951,7 @@ impl<'a> Parser<'a> {
                         Err(mut e) => {
                             // Attempt to keep parsing if it was a similar separator
                             if let Some(ref tokens) = t.similar_tokens() {
-                                if tokens.contains(&self.token) {
+                                if tokens.contains(&self.token.kind) {
                                     self.bump();
                                 }
                             }
@@ -1756,7 +1756,7 @@ impl<'a> Parser<'a> {
     fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
         let ident = self.parse_path_segment_ident()?;
 
-        let is_args_start = |token: &TokenKind| match *token {
+        let is_args_start = |token: &Token| match token.kind {
             token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
             | token::LArrow => true,
             _ => false,
@@ -2822,7 +2822,7 @@ impl<'a> Parser<'a> {
                 LhsExpr::AttributesParsed(attrs) => Some(attrs),
                 _ => None,
             };
-            if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
+            if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
                 return self.parse_prefix_range_expr(attrs);
             } else {
                 self.parse_prefix_expr(attrs)?
@@ -3099,7 +3099,7 @@ impl<'a> Parser<'a> {
             self.err_dotdotdot_syntax(self.token.span);
         }
 
-        debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
+        debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
                       "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
                       self.token);
         let tok = self.token.clone();
@@ -7867,7 +7867,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
     for unmatched in unclosed_delims.iter() {
         let mut err = handler.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
+            pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         err.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 0ece5b57935..9b845ca524e 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -17,7 +17,6 @@ use log::info;
 
 use std::fmt;
 use std::mem;
-use std::ops::Deref;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
 use rustc_data_structures::sync::Lrc;
@@ -553,11 +552,11 @@ impl TokenKind {
 impl Token {
     // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
-    crate fn probably_equal_for_proc_macro(&self, other: &TokenKind) -> bool {
-        if mem::discriminant(&self.kind) != mem::discriminant(other) {
+    crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
+        if mem::discriminant(&self.kind) != mem::discriminant(&other.kind) {
             return false
         }
-        match (&self.kind, other) {
+        match (&self.kind, &other.kind) {
             (&Eq, &Eq) |
             (&Lt, &Lt) |
             (&Le, &Le) |
@@ -631,14 +630,6 @@ impl PartialEq<TokenKind> for Token {
     }
 }
 
-// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
-impl Deref for Token {
-    type Target = TokenKind;
-    fn deref(&self) -> &Self::Target {
-        &self.kind
-    }
-}
-
 #[derive(Clone, RustcEncodable, RustcDecodable)]
 /// For interpolation during macro expansion.
 pub enum Nonterminal {
@@ -778,12 +769,14 @@ impl Nonterminal {
     }
 }
 
-crate fn is_op(tok: &TokenKind) -> bool {
-    match *tok {
-        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
-        Ident(..) | Lifetime(..) | Interpolated(..) |
-        Whitespace | Comment | Shebang(..) | Eof => false,
-        _ => true,
+impl Token {
+    crate fn is_op(&self) -> bool {
+        match self.kind {
+            OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
+            Ident(..) | Lifetime(..) | Interpolated(..) |
+            Whitespace | Comment | Shebang(..) | Eof => false,
+            _ => true,
+        }
     }
 }
 
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index d922e1896cc..4cbe590d44b 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -6,7 +6,7 @@ use crate::ast::{Attribute, MacDelimiter, GenericArg};
 use crate::util::parser::{self, AssocOp, Fixity};
 use crate::attr;
 use crate::source_map::{self, SourceMap, Spanned};
-use crate::parse::token::{self, BinOpToken, Nonterminal, TokenKind};
+use crate::parse::token::{self, BinOpToken, Nonterminal, Token, TokenKind};
 use crate::parse::lexer::comments;
 use crate::parse::{self, ParseSess};
 use crate::print::pp::{self, Breaks};
@@ -189,7 +189,7 @@ pub fn literal_to_string(lit: token::Lit) -> String {
     out
 }
 
-pub fn token_to_string(tok: &TokenKind) -> String {
+pub fn token_kind_to_string(tok: &TokenKind) -> String {
     match *tok {
         token::Eq                   => "=".to_string(),
         token::Lt                   => "<".to_string(),
@@ -250,6 +250,10 @@ pub fn token_to_string(tok: &TokenKind) -> String {
     }
 }
 
+pub fn token_to_string(token: &Token) -> String {
+    token_kind_to_string(&token.kind)
+}
+
 pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
     match *nt {
         token::NtExpr(ref e)        => expr_to_string(e),
@@ -734,11 +738,11 @@ pub trait PrintState<'a> {
                 }
             }
             TokenTree::Delimited(_, delim, tts) => {
-                self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
+                self.writer().word(token_kind_to_string(&token::OpenDelim(delim)))?;
                 self.writer().space()?;
                 self.print_tts(tts)?;
                 self.writer().space()?;
-                self.writer().word(token_to_string(&token::CloseDelim(delim)))
+                self.writer().word(token_kind_to_string(&token::CloseDelim(delim)))
             },
         }
     }
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index d46d2f549c0..2daec970279 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -126,14 +126,6 @@ impl TokenTree {
         }
     }
 
-    /// Indicates if the stream is a token that is equal to the provided token.
-    pub fn eq_token(&self, t: TokenKind) -> bool {
-        match self {
-            TokenTree::Token(token) => *token == t,
-            _ => false,
-        }
-    }
-
     pub fn joint(self) -> TokenStream {
         TokenStream::new(vec![(self, Joint)])
     }