author    Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2019-06-05 11:56:06 +0300
committer Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2019-06-06 14:04:02 +0300
commit    f745e5f9b676be02cc1dfbab0bfb338dc72b4dd3 (patch)
tree      a4982d98a51c2f8358495b9b60e3a059a9e9c1cd /src/libsyntax/parse
parent    4c5d773b4d529c6263f682513ea34ce644a8179b (diff)
download  rust-f745e5f9b676be02cc1dfbab0bfb338dc72b4dd3.tar.gz
          rust-f745e5f9b676be02cc1dfbab0bfb338dc72b4dd3.zip
syntax: Remove duplicate span from `token::Ident`
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/diagnostics.rs  |  6
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs    | 22
-rw-r--r--  src/libsyntax/parse/literal.rs      |  8
-rw-r--r--  src/libsyntax/parse/mod.rs          | 31
-rw-r--r--  src/libsyntax/parse/parser.rs       | 48
-rw-r--r--  src/libsyntax/parse/token.rs        | 94
6 files changed, 105 insertions, 104 deletions
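
The change in a nutshell: `token::Ident` used to carry a full `ast::Ident`, whose span duplicated the span already stored on the enclosing `Token`; it now carries only the interned `ast::Name`, and consumers rebuild an `Ident` from the token's own span when they need one. A minimal sketch of the before/after shape, assuming the libsyntax types from this commit are in scope (`ident_of` is a hypothetical helper mirroring the new `Token::ident` method):

    // Before: the payload carried its own span.
    //     Ident(ast::Ident, /* is_raw */ bool)
    // After: only the name is stored; the span lives on `Token`.
    //     Ident(ast::Name, /* is_raw */ bool)

    // Rebuilding a full identifier from a token, as the new `Token::ident` does:
    fn ident_of(token: &Token) -> Option<(ast::Ident, bool)> {
        match token.kind {
            token::Ident(name, is_raw) => Some((ast::Ident::new(name, token.span), is_raw)),
            _ => None,
        }
    }
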
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index 1759a229cf4..7830b2ce880 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -201,12 +201,12 @@ impl<'a> Parser<'a> {
             self.span,
             &format!("expected identifier, found {}", self.this_token_descr()),
         );
-        if let token::Ident(ident, false) = &self.token.kind {
-            if ident.is_raw_guess() {
+        if let token::Ident(name, false) = self.token.kind {
+            if Ident::new(name, self.span).is_raw_guess() {
                 err.span_suggestion(
                     self.span,
                     "you can escape reserved keywords to use them as identifiers",
-                    format!("r#{}", ident),
+                    format!("r#{}", name),
                     Applicability::MaybeIncorrect,
                 );
             }
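
With the span gone from the token payload, the suggestion code above rebuilds an `Ident` at the current span just to call `is_raw_guess()`. A condensed sketch of that logic, assuming the same libsyntax items are in scope (`suggest_raw_escape` is a hypothetical helper for illustration):

    // Suggest escaping a reserved keyword as a raw identifier, e.g. `type` -> `r#type`.
    fn suggest_raw_escape(name: ast::Name, span: Span) -> Option<String> {
        // `is_raw_guess` is defined on `Ident`, so pair the bare name with the
        // parser's current span before asking whether `r#name` would parse.
        if Ident::new(name, span).is_raw_guess() {
            Some(format!("r#{}", name))
        } else {
            None
        }
    }
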
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index da8c6f5ac22..e3d959c2c54 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1,4 +1,4 @@
-use crate::ast::{self, Ident};
+use crate::ast;
 use crate::parse::ParseSess;
 use crate::parse::token::{self, Token, TokenKind};
 use crate::symbol::{sym, Symbol};
@@ -61,15 +61,6 @@ impl<'a> StringReader<'a> {
         (real, raw)
     }
 
-    fn mk_ident(&self, string: &str) -> Ident {
-        let mut ident = Ident::from_str(string);
-        if let Some(span) = self.override_span {
-            ident.span = span;
-        }
-
-        ident
-    }
-
     fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
         match res {
             Ok(tok) => tok,
@@ -858,17 +849,17 @@ impl<'a> StringReader<'a> {
 
                 return Ok(self.with_str_from(start, |string| {
                     // FIXME: perform NFKC normalization here. (Issue #2253)
-                    let ident = self.mk_ident(string);
+                    let name = ast::Name::intern(string);
 
                     if is_raw_ident {
                         let span = self.mk_sp(raw_start, self.pos);
-                        if !ident.can_be_raw() {
-                            self.err_span(span, &format!("`{}` cannot be a raw identifier", ident));
+                        if !name.can_be_raw() {
+                            self.err_span(span, &format!("`{}` cannot be a raw identifier", name));
                         }
                         self.sess.raw_identifier_spans.borrow_mut().push(span);
                     }
 
-                    token::Ident(ident, is_raw_ident)
+                    token::Ident(name, is_raw_ident)
                 }));
             }
         }
@@ -1567,12 +1558,11 @@ mod tests {
                                         &sh,
                                         "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                             .to_string());
-            let id = Ident::from_str("fn");
             assert_eq!(string_reader.next_token(), token::Comment);
             assert_eq!(string_reader.next_token(), token::Whitespace);
             let tok1 = string_reader.next_token();
             let tok2 = Token::new(
-                token::Ident(id, false),
+                token::Ident(Symbol::intern("fn"), false),
                 Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
             );
             assert_eq!(tok1.kind, tok2.kind);
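
The lexer no longer needs its `mk_ident` helper: it interns the source slice into a `Name` and leaves the span to whoever builds the `Token`. A small sketch of the interning behaviour this relies on, assuming the libsyntax symbol interner and pre-interned keywords (the assertions are illustrative, not part of the commit):

    // Interning the same string always yields the same symbol, so `==` on
    // names replaces the old comparisons on `Ident`.
    let a = ast::Name::intern("fn");
    let b = ast::Name::intern("fn");
    assert_eq!(a, b);       // same interned symbol both times
    assert_eq!(a, kw::Fn);  // keywords are pre-interned symbols
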
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index 978fd205ea4..7b27304071c 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -1,6 +1,6 @@
 //! Code related to parsing literals.
 
-use crate::ast::{self, Ident, Lit, LitKind};
+use crate::ast::{self, Lit, LitKind};
 use crate::parse::parser::Parser;
 use crate::parse::PResult;
 use crate::parse::token::{self, Token, TokenKind};
@@ -230,8 +230,8 @@ impl Lit {
     /// Converts arbitrary token into an AST literal.
     crate fn from_token(token: &TokenKind, span: Span) -> Result<Lit, LitError> {
         let lit = match *token {
-            token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False =>
-                token::Lit::new(token::Bool, ident.name, None),
+            token::Ident(name, false) if name == kw::True || name == kw::False =>
+                token::Lit::new(token::Bool, name, None),
             token::Literal(lit) =>
                 lit,
             token::Interpolated(ref nt) => {
@@ -258,7 +258,7 @@ impl Lit {
     /// Losslessly convert an AST literal into a token stream.
     crate fn tokens(&self) -> TokenStream {
         let token = match self.token.kind {
-            token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
+            token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
         TokenTree::token(self.span, token).into()
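
For bool literals the conversion now works on bare names in both directions. A rough sketch of the parsing direction, mirroring the updated `from_token` arm (`bool_from_token` is a hypothetical stand-alone helper):

    // `true`/`false` arrive as identifier tokens and become bool literals.
    fn bool_from_token(kind: &TokenKind) -> Option<token::Lit> {
        match *kind {
            token::Ident(name, false) if name == kw::True || name == kw::False =>
                Some(token::Lit::new(token::Bool, name, None)),
            _ => None,
        }
    }

The printing direction in `tokens()` is the mirror image: a `token::Bool` literal becomes `Ident(self.token.symbol, false)`, with the span supplied by the surrounding `TokenTree::token` call.
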
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 5187621258d..2b82767d7e9 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -382,11 +382,12 @@ impl SeqSep {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::ast::{self, Ident, PatKind};
+    use crate::ast::{self, Name, PatKind};
     use crate::attr::first_attr_value_str_by_name;
     use crate::ptr::P;
     use crate::parse::token::Token;
     use crate::print::pprust::item_to_string;
+    use crate::symbol::{kw, sym};
     use crate::tokenstream::{DelimSpan, TokenTree};
     use crate::util::parser_testing::string_to_stream;
     use crate::util::parser_testing::{string_to_expr, string_to_item};
@@ -418,8 +419,6 @@ mod tests {
     #[test]
     fn string_to_tts_macro () {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let tts: Vec<_> =
                 string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
             let tts: &[TokenTree] = &tts[..];
@@ -432,8 +431,7 @@ mod tests {
                     Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                     Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
                 )
-                if name_macro_rules.name == sym::macro_rules
-                && name_zip.name.as_str() == "zip" => {
+                if name_macro_rules == sym::macro_rules && name_zip.as_str() == "zip" => {
                     let tts = &macro_tts.trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                         (
@@ -448,9 +446,9 @@ mod tests {
                                 (
                                     2,
                                     Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                                    Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
+                                    Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
                                 )
-                                if first_delim == token::Paren && ident.name.as_str() == "a" => {},
+                                if first_delim == token::Paren && name.as_str() == "a" => {},
                                 _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                             }
                             let tts = &second_tts.trees().collect::<Vec<_>>();
@@ -458,9 +456,9 @@ mod tests {
                                 (
                                     2,
                                     Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                                    Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
+                                    Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
                                 )
-                                if second_delim == token::Paren && ident.name.as_str() == "a" => {},
+                                if second_delim == token::Paren && name.as_str() == "a" => {},
                                 _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                             }
                         },
@@ -478,25 +476,22 @@ mod tests {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
             let expected = TokenStream::new(vec![
-                TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
-                TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+                TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false)).into(),
+                TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(6, 7),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
+                        TokenTree::token(sp(6, 7), token::Ident(Name::intern("b"), false)).into(),
                         TokenTree::token(sp(8, 9), token::Colon).into(),
-                        TokenTree::token(sp(10, 13),
-                                         token::Ident(Ident::from_str("i32"), false)).into(),
+                        TokenTree::token(sp(10, 13), token::Ident(sym::i32, false)).into(),
                     ]).into(),
                 ).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(17, 18),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
+                        TokenTree::token(sp(17, 18), token::Ident(Name::intern("b"), false)).into(),
                         TokenTree::token(sp(18, 19), token::Semi).into(),
                     ]).into(),
                 ).into()
@@ -604,8 +599,6 @@ mod tests {
 
     #[test] fn crlf_doc_comments() {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let sess = ParseSess::new(FilePathMapping::empty());
 
             let name_1 = FileName::Custom("crlf_source_1".to_string());
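
In the tests, expected identifier tokens are now spelled with either a pre-interned symbol (`kw::Fn`, `sym::i32`) or an on-the-spot `Name::intern(...)` instead of `Ident::from_str`, since the span sits on the token tree rather than inside the identifier. A compressed sketch of the idiom, assuming the `sp` test helper and the imports added in this hunk:

    // One expected token tree per identifier: span on the tree, bare name in the kind.
    let fn_tree = TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false));
    let a_tree  = TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false));
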
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 362f81d02a0..57a49d1524d 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -362,7 +362,7 @@ impl TokenCursor {
             delim_span,
             token::Bracket,
             [
-                TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+                TokenTree::token(sp, token::Ident(sym::doc, false)),
                 TokenTree::token(sp, token::Eq),
                 TokenTree::token(sp, token::TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
@@ -541,9 +541,9 @@ impl<'a> Parser<'a> {
 
     crate fn token_descr(&self) -> Option<&'static str> {
         Some(match &self.token.kind {
-            t if t.is_special_ident() => "reserved identifier",
-            t if t.is_used_keyword() => "keyword",
-            t if t.is_unused_keyword() => "reserved keyword",
+            _ if self.token.is_special_ident() => "reserved identifier",
+            _ if self.token.is_used_keyword() => "keyword",
+            _ if self.token.is_unused_keyword() => "reserved keyword",
             token::DocComment(..) => "doc comment",
             _ => return None,
         })
@@ -619,7 +619,7 @@ impl<'a> Parser<'a> {
 
     fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
         match self.token.kind {
-            token::Ident(ident, _) => {
+            token::Ident(name, _) => {
                 if self.token.is_reserved_ident() {
                     let mut err = self.expected_ident_found();
                     if recover {
@@ -630,7 +630,7 @@ impl<'a> Parser<'a> {
                 }
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => {
                 Err(if self.prev_token_kind == PrevTokenKind::DocComment {
@@ -1618,10 +1618,10 @@ impl<'a> Parser<'a> {
 
     fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
         match self.token.kind {
-            token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
+            token::Ident(name, _) if name.is_path_segment_keyword() => {
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }
@@ -1629,10 +1629,10 @@ impl<'a> Parser<'a> {
 
     fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
         match self.token.kind {
-            token::Ident(ident, false) if ident.name == kw::Underscore => {
+            token::Ident(name, false) if name == kw::Underscore => {
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }
@@ -2368,13 +2368,11 @@ impl<'a> Parser<'a> {
             }
 
             let mut recovery_field = None;
-            if let token::Ident(ident, _) = self.token.kind {
+            if let token::Ident(name, _) = self.token.kind {
                 if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
                     // Use in case of error after field-looking code: `S { foo: () with a }`
-                    let mut ident = ident.clone();
-                    ident.span = self.span;
                     recovery_field = Some(ast::Field {
-                        ident,
+                        ident: Ident::new(name, self.span),
                         span: self.span,
                         expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
                         is_shorthand: false,
@@ -2637,7 +2635,7 @@ impl<'a> Parser<'a> {
                              self.look_ahead(1, |t| t.is_ident()) => {
                 self.bump();
                 let name = match self.token.kind {
-                    token::Ident(ident, _) => ident,
+                    token::Ident(name, _) => name,
                     _ => unreachable!()
                 };
                 let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
@@ -2651,7 +2649,7 @@ impl<'a> Parser<'a> {
                 // Interpolated identifier and lifetime tokens are replaced with usual identifier
                 // and lifetime tokens, so the former are never encountered during normal parsing.
                 match **nt {
-                    token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident, is_raw), ident.span),
+                    token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident.name, is_raw), ident.span),
                     token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
                     _ => return,
                 }
@@ -2766,7 +2764,7 @@ impl<'a> Parser<'a> {
                 let token_cannot_continue_expr = |t: &Token| match t.kind {
                     // These tokens can start an expression after `!`, but
                     // can't continue an expression after an ident
-                    token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
+                    token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
                     token::Literal(..) | token::Pound => true,
                     token::Interpolated(ref nt) => match **nt {
                         token::NtIdent(..) | token::NtExpr(..) |
@@ -4328,7 +4326,7 @@ impl<'a> Parser<'a> {
                      -> PResult<'a, Option<P<Item>>> {
         let token_lo = self.span;
         let (ident, def) = match self.token.kind {
-            token::Ident(ident, false) if ident.name == kw::Macro => {
+            token::Ident(name, false) if name == kw::Macro => {
                 self.bump();
                 let ident = self.parse_ident()?;
                 let tokens = if self.check(&token::OpenDelim(token::Brace)) {
@@ -4356,8 +4354,8 @@ impl<'a> Parser<'a> {
 
                 (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
             }
-            token::Ident(ident, _) if ident.name == sym::macro_rules &&
-                                   self.look_ahead(1, |t| *t == token::Not) => {
+            token::Ident(name, _) if name == sym::macro_rules &&
+                                     self.look_ahead(1, |t| *t == token::Not) => {
                 let prev_span = self.prev_span;
                 self.complain_if_pub_macro(&vis.node, prev_span);
                 self.bump();
@@ -5481,8 +5479,8 @@ impl<'a> Parser<'a> {
     fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
         let expect_ident = |this: &mut Self| match this.token.kind {
             // Preserve hygienic context.
-            token::Ident(ident, _) =>
-                { let span = this.span; this.bump(); Ident::new(ident.name, span) }
+            token::Ident(name, _) =>
+                { let span = this.span; this.bump(); Ident::new(name, span) }
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {
@@ -5805,11 +5803,7 @@ impl<'a> Parser<'a> {
         match *vis {
             VisibilityKind::Inherited => {}
             _ => {
-                let is_macro_rules: bool = match self.token.kind {
-                    token::Ident(sid, _) => sid.name == sym::macro_rules,
-                    _ => false,
-                };
-                let mut err = if is_macro_rules {
+                let mut err = if self.token.is_keyword(sym::macro_rules) {
                     let mut err = self.diagnostic()
                         .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
                     err.span_suggestion(
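
The parser edits all follow the same shape: match on the bare name, capture `self.span` before `bump()`, and rebuild the identifier from the two. A condensed sketch of that recurring pattern, abridged from the methods above (not a complete parser method):

    // Recurring pattern after the change: the span comes from the parser,
    // not from the token payload.
    match self.token.kind {
        token::Ident(name, _) => {
            let span = self.span;
            self.bump();
            Ok(Ident::new(name, span))
        }
        _ => self.parse_ident(),
    }
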
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 81c93a4179e..ba7c88e7000 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -118,8 +118,8 @@ impl Lit {
     }
 }
 
-pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: TokenKind = Ident(ident, is_raw);
+pub(crate) fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);
 
     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
@@ -146,11 +146,11 @@ pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
         kw::While,
         kw::Yield,
         kw::Static,
-    ].contains(&ident.name)
+    ].contains(&name)
 }
 
-fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: TokenKind = Ident(ident, is_raw);
+fn ident_can_begin_type(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);
 
     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
@@ -163,7 +163,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
         kw::Extern,
         kw::Typeof,
         kw::Dyn,
-    ].contains(&ident.name)
+    ].contains(&name)
 }
 
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
@@ -210,7 +210,7 @@ pub enum TokenKind {
     Literal(Lit),
 
     /* Name components */
-    Ident(ast::Ident, /* is_raw */ bool),
+    Ident(ast::Name, /* is_raw */ bool),
     Lifetime(ast::Name),
 
     Interpolated(Lrc<Nonterminal>),
@@ -245,7 +245,7 @@ pub struct Token {
 impl TokenKind {
     /// Recovers a `TokenKind` from an `ast::Ident`. This creates a raw identifier if necessary.
     pub fn from_ast_ident(ident: ast::Ident) -> TokenKind {
-        Ident(ident, ident.is_raw_guess())
+        Ident(ident.name, ident.is_raw_guess())
     }
 
     crate fn is_like_plus(&self) -> bool {
@@ -254,12 +254,14 @@ impl TokenKind {
             _ => false,
         }
     }
+}
 
+impl Token {
     /// Returns `true` if the token can appear at the start of an expression.
     crate fn can_begin_expr(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw)              =>
-                ident_can_begin_expr(ident, is_raw), // value name or keyword
+        match self.kind {
+            Ident(name, is_raw)              =>
+                ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
             OpenDelim(..)                     | // tuple, array or block
             Literal(..)                       | // literal
             Not                               | // operator not
@@ -289,9 +291,9 @@ impl TokenKind {
 
     /// Returns `true` if the token can appear at the start of a type.
     crate fn can_begin_type(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw)        =>
-                ident_can_begin_type(ident, is_raw), // type name or keyword
+        match self.kind {
+            Ident(name, is_raw)        =>
+                ident_can_begin_type(name, self.span, is_raw), // type name or keyword
             OpenDelim(Paren)            | // tuple
             OpenDelim(Bracket)          | // array
             Not                         | // never
@@ -309,7 +311,9 @@ impl TokenKind {
             _ => false,
         }
     }
+}
 
+impl TokenKind {
     /// Returns `true` if the token can appear at the start of a const param.
     pub fn can_begin_const_arg(&self) -> bool {
         match self {
@@ -323,13 +327,17 @@ impl TokenKind {
             _ => self.can_begin_literal_or_bool(),
         }
     }
+}
 
+impl Token {
     /// Returns `true` if the token can appear at the start of a generic bound.
     crate fn can_begin_bound(&self) -> bool {
         self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
         self == &Question || self == &OpenDelim(Paren)
     }
+}
 
+impl TokenKind {
     pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
         Literal(Lit::new(kind, symbol, suffix))
     }
@@ -355,8 +363,8 @@ impl TokenKind {
         match *self {
             Literal(..)  => true,
             BinOp(Minus) => true,
-            Ident(ident, false) if ident.name == kw::True => true,
-            Ident(ident, false) if ident.name == kw::False => true,
+            Ident(name, false) if name == kw::True => true,
+            Ident(name, false) if name == kw::False => true,
             Interpolated(ref nt) => match **nt {
                 NtLiteral(..) => true,
                 _             => false,
@@ -367,6 +375,18 @@ impl TokenKind {
 }
 
 impl Token {
+    /// Returns an identifier if this token is an identifier.
+    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
+        match self.kind {
+            Ident(name, is_raw) => Some((ast::Ident::new(name, self.span), is_raw)),
+            Interpolated(ref nt) => match **nt {
+                NtIdent(ident, is_raw) => Some((ident, is_raw)),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+
     /// Returns a lifetime identifier if this token is a lifetime.
     pub fn lifetime(&self) -> Option<ast::Ident> {
         match self.kind {
@@ -381,12 +401,12 @@ impl Token {
 }
 
 impl TokenKind {
-    /// Returns an identifier if this token is an identifier.
-    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
+    /// Returns an identifier name if this token is an identifier.
+    pub fn ident_name(&self) -> Option<(ast::Name, /* is_raw */ bool)> {
         match *self {
-            Ident(ident, is_raw) => Some((ident, is_raw)),
+            Ident(name, is_raw) => Some((name, is_raw)),
             Interpolated(ref nt) => match **nt {
-                NtIdent(ident, is_raw) => Some((ident, is_raw)),
+                NtIdent(ident, is_raw) => Some((ident.name, is_raw)),
                 _ => None,
             },
             _ => None,
@@ -405,7 +425,7 @@ impl TokenKind {
     }
     /// Returns `true` if the token is an identifier.
     pub fn is_ident(&self) -> bool {
-        self.ident().is_some()
+        self.ident_name().is_some()
     }
     /// Returns `true` if the token is a lifetime.
     crate fn is_lifetime(&self) -> bool {
@@ -415,10 +435,7 @@ impl TokenKind {
     /// Returns `true` if the token is a identifier whose name is the given
     /// string slice.
     crate fn is_ident_named(&self, name: Symbol) -> bool {
-        match self.ident() {
-            Some((ident, _)) => ident.name == name,
-            None => false
-        }
+        self.ident_name().map_or(false, |(ident_name, _)| ident_name == name)
     }
 
     /// Returns `true` if the token is an interpolated path.
@@ -440,24 +457,30 @@ impl TokenKind {
     crate fn is_qpath_start(&self) -> bool {
         self == &Lt || self == &BinOp(Shl)
     }
+}
 
+impl Token {
     crate fn is_path_start(&self) -> bool {
         self == &ModSep || self.is_qpath_start() || self.is_path() ||
         self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
     }
+}
 
+impl TokenKind {
     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: Symbol) -> bool {
-        self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false)
+        self.ident_name().map(|(name, is_raw)| name == kw && !is_raw).unwrap_or(false)
     }
 
     pub fn is_path_segment_keyword(&self) -> bool {
-        match self.ident() {
-            Some((id, false)) => id.is_path_segment_keyword(),
+        match self.ident_name() {
+            Some((name, false)) => name.is_path_segment_keyword(),
             _ => false,
         }
     }
+}
 
+impl Token {
     // Returns true for reserved identifiers used internally for elided lifetimes,
     // unnamed method parameters, crate root module, error recovery etc.
     pub fn is_special_ident(&self) -> bool {
@@ -490,7 +513,9 @@ impl TokenKind {
             _ => false,
         }
     }
+}
 
+impl TokenKind {
     crate fn glue(self, joint: TokenKind) -> Option<TokenKind> {
         Some(match self {
             Eq => match joint {
@@ -537,7 +562,7 @@ impl TokenKind {
                 _ => return None,
             },
             SingleQuote => match joint {
-                Ident(ident, false) => Lifetime(Symbol::intern(&format!("'{}", ident))),
+                Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))),
                 _ => return None,
             },
 
@@ -608,9 +633,9 @@ impl TokenKind {
             (&Literal(a), &Literal(b)) => a == b,
 
             (&Lifetime(a), &Lifetime(b)) => a == b,
-            (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
-                                                       a.name == kw::DollarCrate ||
-                                                       c.name == kw::DollarCrate),
+            (&Ident(a, b), &Ident(c, d)) => b == d && (a == c ||
+                                                       a == kw::DollarCrate ||
+                                                       c == kw::DollarCrate),
 
             (&Interpolated(_), &Interpolated(_)) => false,
 
@@ -738,8 +763,7 @@ impl Nonterminal {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Ident(ident, is_raw);
-                Some(TokenTree::token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, Ident(ident.name, is_raw)).into())
             }
             Nonterminal::NtLifetime(ident) => {
                 Some(TokenTree::token(ident.span, Lifetime(ident.name)).into())
@@ -827,7 +851,7 @@ fn prepend_attrs(sess: &ParseSess,
         // For simple paths, push the identifier directly
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
-            let token = Ident(ident, ident.as_str().starts_with("r#"));
+            let token = Ident(ident.name, ident.as_str().starts_with("r#"));
             brackets.push(tokenstream::TokenTree::token(ident.span, token));
 
         // ... and for more complicated paths, fall back to a reparse hack that
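
Finally, token.rs splits the `impl` blocks: predicates that need a span (`can_begin_expr`, `can_begin_type`, `can_begin_bound`, `is_path_start`, the reserved-identifier checks) move from `TokenKind` to `Token`, and the old `TokenKind::ident()` becomes `ident_name()` while the span-carrying accessor lives on `Token`. A short usage sketch of which accessor to reach for, assuming a `token: &Token` from this crate:

    // `Token` knows its span, so it can hand back a full `ast::Ident`.
    if let Some((ident, _is_raw)) = token.ident() {
        assert_eq!(ident.span, token.span);
    }

    // `TokenKind` alone only has the interned name, e.g. for keyword checks.
    if let Some((name, _is_raw)) = token.kind.ident_name() {
        let _is_fn = name == kw::Fn;
    }
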