author     bors <bors@rust-lang.org>  2019-06-07 06:52:09 +0000
committer  bors <bors@rust-lang.org>  2019-06-07 06:52:09 +0000
commit     ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4 (patch)
tree       07a0d2ef9340fa064341cc697a8ae58e3762373a
parent     c5295ac64a8f2c7aee9cdd13b8fe00b82aff8435 (diff)
parent     3a31f0634bb1669eae64e83f595942986f867125 (diff)
download   rust-ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4.tar.gz
           rust-ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4.zip
Auto merge of #61541 - petrochenkov:tsp, r=oli-obk
syntax: Keep token span as a part of `Token`

In a world with proc macros and edition hygiene, a `Token` without a span is not self-contained.
In practice this means that tokens and spans are always stored and passed around together.
This PR combines them into a single struct via the following renamings/replacements:

- `Token` -> `TokenKind`
- `TokenAndSpan` -> `Token`
- `(Token, Span)` -> `Token`

Some later commits (https://github.com/rust-lang/rust/commit/fb6e2fe8fd6caed247857758c6c3549fe2b59527 and https://github.com/rust-lang/rust/commit/1cdee86940db892cd17239c26add5364335e895a) remove duplicate spans in `token::Ident` and `token::Lifetime`.
Those spans were supposed to be identical to token spans, but could easily go out of sync, as was noticed in https://github.com/rust-lang/rust/pull/60965#discussion_r285398523.
The `(Token, Span)` -> `Token` change is a soft prerequisite for this de-duplication, since it avoids some larger churn (passing spans to most of the functions that classify identifiers).
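
To make the shape of the change concrete, here is a minimal, self-contained sketch. The `TokenKind` variants, the `Span` fields, and `Token::new` below are simplified stand-ins for illustration, not the real `libsyntax` definitions:

    // Simplified stand-ins for illustration only; the real types live in
    // `syntax::parse::token` and `syntax_pos`.
    #[derive(Clone, PartialEq, Debug)]
    enum TokenKind {                        // previously named `Token`
        Eq,
        Comma,
        Ident(String, /* is_raw */ bool),
        Eof,
    }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Span { lo: u32, hi: u32 }        // stand-in for `syntax_pos::Span`

    // Previously `TokenAndSpan`, or an ad-hoc `(Token, Span)` pair.
    #[derive(Clone, PartialEq, Debug)]
    struct Token {
        kind: TokenKind,
        span: Span,
    }

    impl Token {
        fn new(kind: TokenKind, span: Span) -> Token {
            Token { kind, span }
        }
    }

    fn main() {
        // A token now carries its own span, so APIs that used to take a
        // separate `Span` argument (or a `(Token, Span)` pair) take just `Token`.
        let tok = Token::new(TokenKind::Ident("foo".into(), false),
                             Span { lo: 0, hi: 3 });
        println!("{:?} at {}..{}", tok.kind, tok.span.lo, tok.span.hi);
    }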
-rw-r--r--  src/doc/unstable-book/src/language-features/plugin.md | 6
-rw-r--r--  src/librustc/hir/lowering.rs | 14
-rw-r--r--  src/librustc/hir/map/def_collector.rs | 2
-rw-r--r--  src/librustc/ich/impls_syntax.rs | 124
-rw-r--r--  src/librustc_lint/builtin.rs | 12
-rw-r--r--  src/librustc_resolve/build_reduced_graph.rs | 2
-rw-r--r--  src/librustc_save_analysis/span_utils.rs | 22
-rw-r--r--  src/librustdoc/html/highlight.rs | 34
-rw-r--r--  src/librustdoc/passes/check_code_block_syntax.rs | 6
-rw-r--r--  src/libsyntax/attr/mod.rs | 48
-rw-r--r--  src/libsyntax/diagnostics/plugin.rs | 28
-rw-r--r--  src/libsyntax/early_buffered_lints.rs | 2
-rw-r--r--  src/libsyntax/ext/base.rs | 11
-rw-r--r--  src/libsyntax/ext/expand.rs | 6
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs | 106
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs | 75
-rw-r--r--  src/libsyntax/ext/tt/quoted.rs | 114
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs | 16
-rw-r--r--  src/libsyntax/feature_gate.rs | 8
-rw-r--r--  src/libsyntax/lib.rs | 7
-rw-r--r--  src/libsyntax/mut_visit.rs | 22
-rw-r--r--  src/libsyntax/parse/attr.rs | 14
-rw-r--r--  src/libsyntax/parse/diagnostics.rs | 41
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs | 237
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs | 43
-rw-r--r--  src/libsyntax/parse/literal.rs | 43
-rw-r--r--  src/libsyntax/parse/mod.rs | 65
-rw-r--r--  src/libsyntax/parse/parser.rs | 297
-rw-r--r--  src/libsyntax/parse/token.rs | 199
-rw-r--r--  src/libsyntax/print/pprust.rs | 12
-rw-r--r--  src/libsyntax/tokenstream.rs | 119
-rw-r--r--  src/libsyntax/util/parser.rs | 52
-rw-r--r--  src/libsyntax/visit.rs | 2
-rw-r--r--  src/libsyntax_ext/asm.rs | 9
-rw-r--r--  src/libsyntax_ext/assert.rs | 11
-rw-r--r--  src/libsyntax_ext/concat_idents.rs | 8
-rw-r--r--  src/libsyntax_ext/deriving/custom.rs | 6
-rw-r--r--  src/libsyntax_ext/format.rs | 6
-rw-r--r--  src/libsyntax_ext/proc_macro_decls.rs | 4
-rw-r--r--  src/libsyntax_ext/proc_macro_server.rs | 45
-rw-r--r--  src/libsyntax_ext/trace_macros.rs | 4
-rw-r--r--  src/libsyntax_pos/symbol.rs | 35
-rw-r--r--  src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs | 20
43 files changed, 978 insertions(+), 959 deletions(-)
diff --git a/src/doc/unstable-book/src/language-features/plugin.md b/src/doc/unstable-book/src/language-features/plugin.md
index 43fffd68037..1994cf49188 100644
--- a/src/doc/unstable-book/src/language-features/plugin.md
+++ b/src/doc/unstable-book/src/language-features/plugin.md
@@ -56,7 +56,7 @@ extern crate syntax_pos;
 extern crate rustc;
 extern crate rustc_plugin;
 
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::tokenstream::TokenTree;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax::ext::build::AstBuilder;  // A trait for expr_usize.
@@ -64,7 +64,7 @@ use syntax_pos::Span;
 use rustc_plugin::Registry;
 
 fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-        -> Box<MacResult + 'static> {
+        -> Box<dyn MacResult + 'static> {
 
     static NUMERALS: &'static [(&'static str, usize)] = &[
         ("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -80,7 +80,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
     }
 
     let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s)) => s.to_string(),
+        TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs
index d6ad335525c..e7f52b48cb9 100644
--- a/src/librustc/hir/lowering.rs
+++ b/src/librustc/hir/lowering.rs
@@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary;
 use syntax::std_inject;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::Token;
+use syntax::parse::token::{self, Token};
 use syntax::visit::{self, Visitor};
 use syntax_pos::{DUMMY_SP, edition, Span};
 
@@ -1328,7 +1328,7 @@ impl<'a> LoweringContext<'a> {
 
     fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
         match tree {
-            TokenTree::Token(span, token) => self.lower_token(token, span),
+            TokenTree::Token(token) => self.lower_token(token),
             TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
                 span,
                 delim,
@@ -1337,13 +1337,13 @@ impl<'a> LoweringContext<'a> {
         }
     }
 
-    fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
-        match token {
-            Token::Interpolated(nt) => {
-                let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
+    fn lower_token(&mut self, token: Token) -> TokenStream {
+        match token.kind {
+            token::Interpolated(nt) => {
+                let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span);
                 self.lower_token_stream(tts)
             }
-            other => TokenTree::Token(span, other).into(),
+            _ => TokenTree::Token(token).into(),
         }
     }
 
diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs
index a4484c81738..41073773e9f 100644
--- a/src/librustc/hir/map/def_collector.rs
+++ b/src/librustc/hir/map/def_collector.rs
@@ -326,7 +326,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
     }
 
     fn visit_token(&mut self, t: Token) {
-        if let Token::Interpolated(nt) = t {
+        if let token::Interpolated(nt) = t.kind {
             if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::Mac(..) = expr.node {
                     self.visit_macro_invoc(expr.id);
diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs
index 0cdd9a863cc..abe4196abd1 100644
--- a/src/librustc/ich/impls_syntax.rs
+++ b/src/librustc/ich/impls_syntax.rs
@@ -261,9 +261,8 @@ for tokenstream::TokenTree {
                                           hasher: &mut StableHasher<W>) {
         mem::discriminant(self).hash_stable(hcx, hasher);
         match *self {
-            tokenstream::TokenTree::Token(span, ref token) => {
-                span.hash_stable(hcx, hasher);
-                hash_token(token, hcx, hasher);
+            tokenstream::TokenTree::Token(ref token) => {
+                token.hash_stable(hcx, hasher);
             }
             tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
@@ -306,70 +305,75 @@ impl_stable_hash_for!(struct token::Lit {
     suffix
 });
 
-fn hash_token<'a, 'gcx, W: StableHasherResult>(
-    token: &token::Token,
-    hcx: &mut StableHashingContext<'a>,
-    hasher: &mut StableHasher<W>,
-) {
-    mem::discriminant(token).hash_stable(hcx, hasher);
-    match *token {
-        token::Token::Eq |
-        token::Token::Lt |
-        token::Token::Le |
-        token::Token::EqEq |
-        token::Token::Ne |
-        token::Token::Ge |
-        token::Token::Gt |
-        token::Token::AndAnd |
-        token::Token::OrOr |
-        token::Token::Not |
-        token::Token::Tilde |
-        token::Token::At |
-        token::Token::Dot |
-        token::Token::DotDot |
-        token::Token::DotDotDot |
-        token::Token::DotDotEq |
-        token::Token::Comma |
-        token::Token::Semi |
-        token::Token::Colon |
-        token::Token::ModSep |
-        token::Token::RArrow |
-        token::Token::LArrow |
-        token::Token::FatArrow |
-        token::Token::Pound |
-        token::Token::Dollar |
-        token::Token::Question |
-        token::Token::SingleQuote |
-        token::Token::Whitespace |
-        token::Token::Comment |
-        token::Token::Eof => {}
-
-        token::Token::BinOp(bin_op_token) |
-        token::Token::BinOpEq(bin_op_token) => {
-            std_hash::Hash::hash(&bin_op_token, hasher);
-        }
+impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a>,
+                                          hasher: &mut StableHasher<W>) {
+        mem::discriminant(self).hash_stable(hcx, hasher);
+        match *self {
+            token::Eq |
+            token::Lt |
+            token::Le |
+            token::EqEq |
+            token::Ne |
+            token::Ge |
+            token::Gt |
+            token::AndAnd |
+            token::OrOr |
+            token::Not |
+            token::Tilde |
+            token::At |
+            token::Dot |
+            token::DotDot |
+            token::DotDotDot |
+            token::DotDotEq |
+            token::Comma |
+            token::Semi |
+            token::Colon |
+            token::ModSep |
+            token::RArrow |
+            token::LArrow |
+            token::FatArrow |
+            token::Pound |
+            token::Dollar |
+            token::Question |
+            token::SingleQuote |
+            token::Whitespace |
+            token::Comment |
+            token::Eof => {}
+
+            token::BinOp(bin_op_token) |
+            token::BinOpEq(bin_op_token) => {
+                std_hash::Hash::hash(&bin_op_token, hasher);
+            }
 
-        token::Token::OpenDelim(delim_token) |
-        token::Token::CloseDelim(delim_token) => {
-            std_hash::Hash::hash(&delim_token, hasher);
-        }
-        token::Token::Literal(lit) => lit.hash_stable(hcx, hasher),
+            token::OpenDelim(delim_token) |
+            token::CloseDelim(delim_token) => {
+                std_hash::Hash::hash(&delim_token, hasher);
+            }
+            token::Literal(lit) => lit.hash_stable(hcx, hasher),
 
-        token::Token::Ident(ident, is_raw) => {
-            ident.name.hash_stable(hcx, hasher);
-            is_raw.hash_stable(hcx, hasher);
-        }
-        token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
+            token::Ident(name, is_raw) => {
+                name.hash_stable(hcx, hasher);
+                is_raw.hash_stable(hcx, hasher);
+            }
+            token::Lifetime(name) => name.hash_stable(hcx, hasher),
 
-        token::Token::Interpolated(_) => {
-            bug!("interpolated tokens should not be present in the HIR")
-        }
+            token::Interpolated(_) => {
+                bug!("interpolated tokens should not be present in the HIR")
+            }
 
-        token::Token::DocComment(val) |
-        token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
+            token::DocComment(val) |
+            token::Shebang(val) => val.hash_stable(hcx, hasher),
+        }
     }
 }
 
+impl_stable_hash_for!(struct token::Token {
+    kind,
+    span
+});
+
 impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
     MetaItem(meta_item),
     Literal(lit)
diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs
index 937085c8ad8..6e4d0e881f7 100644
--- a/src/librustc_lint/builtin.rs
+++ b/src/librustc_lint/builtin.rs
@@ -1414,15 +1414,9 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
         for tt in tokens.into_trees() {
             match tt {
-                TokenTree::Token(span, tok) => match tok.ident() {
-                    // only report non-raw idents
-                    Some((ident, false)) => {
-                        self.check_ident_token(cx, UnderMacro(true), ast::Ident {
-                            span: span.substitute_dummy(ident.span),
-                            ..ident
-                        });
-                    }
-                    _ => {},
+                // Only report non-raw idents.
+                TokenTree::Token(token) => if let Some((ident, false)) = token.ident() {
+                    self.check_ident_token(cx, UnderMacro(true), ident);
                 }
                 TokenTree::Delimited(_, _, tts) => {
                     self.check_tokens(cx, tts)
diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs
index 92faab192fa..6d0b142fb24 100644
--- a/src/librustc_resolve/build_reduced_graph.rs
+++ b/src/librustc_resolve/build_reduced_graph.rs
@@ -1053,7 +1053,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
     }
 
     fn visit_token(&mut self, t: Token) {
-        if let Token::Interpolated(nt) = t {
+        if let token::Interpolated(nt) = t.kind {
             if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::Mac(..) = expr.node {
                     self.visit_invoc(expr.id);
diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs
index e2c93b6d331..5831b0bcd8f 100644
--- a/src/librustc_save_analysis/span_utils.rs
+++ b/src/librustc_save_analysis/span_utils.rs
@@ -5,7 +5,7 @@ use crate::generated_code;
 use std::cell::Cell;
 
 use syntax::parse::lexer::{self, StringReader};
-use syntax::parse::token::{self, Token};
+use syntax::parse::token::{self, TokenKind};
 use syntax_pos::*;
 
 #[derive(Clone)]
@@ -56,15 +56,15 @@ impl<'a> SpanUtils<'a> {
         lexer::StringReader::retokenize(&self.sess.parse_sess, span)
     }
 
-    pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option<Span> {
+    pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option<Span> {
         let mut toks = self.retokenise_span(span);
         loop {
             let next = toks.real_token();
-            if next.tok == token::Eof {
+            if next == token::Eof {
                 return None;
             }
-            if next.tok == tok {
-                return Some(next.sp);
+            if next == tok {
+                return Some(next.span);
             }
         }
     }
@@ -74,12 +74,12 @@ impl<'a> SpanUtils<'a> {
     //     let mut toks = self.retokenise_span(span);
     //     loop {
     //         let ts = toks.real_token();
-    //         if ts.tok == token::Eof {
+    //         if ts == token::Eof {
     //             return None;
     //         }
-    //         if ts.tok == token::Not {
+    //         if ts == token::Not {
     //             let ts = toks.real_token();
-    //             if ts.tok.is_ident() {
+    //             if ts.kind.is_ident() {
     //                 return Some(ts.sp);
     //             } else {
     //                 return None;
@@ -93,12 +93,12 @@ impl<'a> SpanUtils<'a> {
     //     let mut toks = self.retokenise_span(span);
     //     let mut prev = toks.real_token();
     //     loop {
-    //         if prev.tok == token::Eof {
+    //         if prev == token::Eof {
     //             return None;
     //         }
     //         let ts = toks.real_token();
-    //         if ts.tok == token::Not {
-    //             if prev.tok.is_ident() {
+    //         if ts == token::Not {
+    //             if prev.kind.is_ident() {
     //                 return Some(prev.sp);
     //             } else {
     //                 return None;
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 932419c78f2..281bd72deeb 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -12,8 +12,8 @@ use std::io;
 use std::io::prelude::*;
 
 use syntax::source_map::{SourceMap, FilePathMapping};
-use syntax::parse::lexer::{self, TokenAndSpan};
-use syntax::parse::token;
+use syntax::parse::lexer;
+use syntax::parse::token::{self, Token};
 use syntax::parse;
 use syntax::symbol::{kw, sym};
 use syntax_pos::{Span, FileName};
@@ -186,9 +186,9 @@ impl<'a> Classifier<'a> {
     }
 
     /// Gets the next token out of the lexer.
-    fn try_next_token(&mut self) -> Result<TokenAndSpan, HighlightError> {
+    fn try_next_token(&mut self) -> Result<Token, HighlightError> {
         match self.lexer.try_next_token() {
-            Ok(tas) => Ok(tas),
+            Ok(token) => Ok(token),
             Err(_) => Err(HighlightError::LexError),
         }
     }
@@ -205,7 +205,7 @@ impl<'a> Classifier<'a> {
                                    -> Result<(), HighlightError> {
         loop {
             let next = self.try_next_token()?;
-            if next.tok == token::Eof {
+            if next == token::Eof {
                 break;
             }
 
@@ -218,9 +218,9 @@ impl<'a> Classifier<'a> {
     // Handles an individual token from the lexer.
     fn write_token<W: Writer>(&mut self,
                               out: &mut W,
-                              tas: TokenAndSpan)
+                              token: Token)
                               -> Result<(), HighlightError> {
-        let klass = match tas.tok {
+        let klass = match token.kind {
             token::Shebang(s) => {
                 out.string(Escape(&s.as_str()), Class::None)?;
                 return Ok(());
@@ -234,7 +234,7 @@ impl<'a> Classifier<'a> {
             // reference or dereference operator or a reference or pointer type, instead of the
             // bit-and or multiplication operator.
             token::BinOp(token::And) | token::BinOp(token::Star)
-                if self.lexer.peek().tok != token::Whitespace => Class::RefKeyWord,
+                if self.lexer.peek() != &token::Whitespace => Class::RefKeyWord,
 
             // Consider this as part of a macro invocation if there was a
             // leading identifier.
@@ -257,7 +257,7 @@ impl<'a> Classifier<'a> {
             token::Question => Class::QuestionMark,
 
             token::Dollar => {
-                if self.lexer.peek().tok.is_ident() {
+                if self.lexer.peek().kind.is_ident() {
                     self.in_macro_nonterminal = true;
                     Class::MacroNonTerminal
                 } else {
@@ -280,9 +280,9 @@ impl<'a> Classifier<'a> {
                 // as an attribute.
 
                 // Case 1: #![inner_attribute]
-                if self.lexer.peek().tok == token::Not {
+                if self.lexer.peek() == &token::Not {
                     self.try_next_token()?; // NOTE: consumes `!` token!
-                    if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
+                    if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
                         self.in_attribute = true;
                         out.enter_span(Class::Attribute)?;
                     }
@@ -292,7 +292,7 @@ impl<'a> Classifier<'a> {
                 }
 
                 // Case 2: #[outer_attribute]
-                if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
+                if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
                     self.in_attribute = true;
                     out.enter_span(Class::Attribute)?;
                 }
@@ -325,8 +325,8 @@ impl<'a> Classifier<'a> {
             }
 
             // Keywords are also included in the identifier set.
-            token::Ident(ident, is_raw) => {
-                match ident.name {
+            token::Ident(name, is_raw) => {
+                match name {
                     kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord,
 
                     kw::SelfLower | kw::SelfUpper => Class::Self_,
@@ -335,13 +335,13 @@ impl<'a> Classifier<'a> {
                     sym::Option | sym::Result => Class::PreludeTy,
                     sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,
 
-                    _ if tas.tok.is_reserved_ident() => Class::KeyWord,
+                    _ if token.is_reserved_ident() => Class::KeyWord,
 
                     _ => {
                         if self.in_macro_nonterminal {
                             self.in_macro_nonterminal = false;
                             Class::MacroNonTerminal
-                        } else if self.lexer.peek().tok == token::Not {
+                        } else if self.lexer.peek() == &token::Not {
                             self.in_macro = true;
                             Class::Macro
                         } else {
@@ -359,7 +359,7 @@ impl<'a> Classifier<'a> {
 
         // Anything that didn't return above is the simple case where we the
         // class just spans a single token, so we can use the `string` method.
-        out.string(Escape(&self.snip(tas.sp)), klass)?;
+        out.string(Escape(&self.snip(token.span)), klass)?;
 
         Ok(())
     }
diff --git a/src/librustdoc/passes/check_code_block_syntax.rs b/src/librustdoc/passes/check_code_block_syntax.rs
index 0556852c54a..694843ad7f7 100644
--- a/src/librustdoc/passes/check_code_block_syntax.rs
+++ b/src/librustdoc/passes/check_code_block_syntax.rs
@@ -1,5 +1,5 @@
 use errors::Applicability;
-use syntax::parse::lexer::{TokenAndSpan, StringReader as Lexer};
+use syntax::parse::lexer::{StringReader as Lexer};
 use syntax::parse::{ParseSess, token};
 use syntax::source_map::FilePathMapping;
 use syntax_pos::FileName;
@@ -33,8 +33,8 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
         );
 
         let errors = Lexer::new_or_buffered_errs(&sess, source_file, None).and_then(|mut lexer| {
-            while let Ok(TokenAndSpan { tok, .. }) = lexer.try_next_token() {
-                if tok == token::Eof {
+            while let Ok(token::Token { kind, .. }) = lexer.try_next_token() {
+                if kind == token::Eof {
                     break;
                 }
             }
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index 48948e4d0d7..edfe097c72f 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -20,7 +20,7 @@ use crate::source_map::{BytePos, Spanned, dummy_spanned};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::parser::Parser;
 use crate::parse::{self, ParseSess, PResult};
-use crate::parse::token::{self, Token};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::ptr::P;
 use crate::symbol::{sym, Symbol};
 use crate::ThinVec;
@@ -465,10 +465,10 @@ impl MetaItem {
                 let mod_sep_span = Span::new(last_pos,
                                              segment.ident.span.lo(),
                                              segment.ident.span.ctxt());
-                idents.push(TokenTree::Token(mod_sep_span, Token::ModSep).into());
+                idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
             }
-            idents.push(TokenTree::Token(segment.ident.span,
-                                         Token::from_ast_ident(segment.ident)).into());
+            idents.push(TokenTree::token(TokenKind::from_ast_ident(segment.ident),
+                                         segment.ident.span).into());
             last_pos = segment.ident.span.hi();
         }
         self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
@@ -480,26 +480,28 @@ impl MetaItem {
     {
         // FIXME: Share code with `parse_path`.
         let path = match tokens.next() {
-            Some(TokenTree::Token(span, token @ Token::Ident(..))) |
-            Some(TokenTree::Token(span, token @ Token::ModSep)) => 'arm: {
-                let mut segments = if let Token::Ident(ident, _) = token {
-                    if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
+            Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
+            Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
+                let mut segments = if let token::Ident(name, _) = kind {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
-                        vec![PathSegment::from_ident(ident.with_span_pos(span))]
+                        vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
-                        break 'arm Path::from_ident(ident.with_span_pos(span));
+                        break 'arm Path::from_ident(Ident::new(name, span));
                     }
                 } else {
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(span,
-                                                    Token::Ident(ident, _))) = tokens.next() {
-                        segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
+                            = tokens.next() {
+                        segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
                     } else {
                         break;
@@ -508,7 +510,7 @@ impl MetaItem {
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt {
+            Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),
@@ -533,7 +535,7 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
+                let mut vec = vec![TokenTree::token(token::Eq, span).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }
@@ -541,7 +543,7 @@ impl MetaItemKind {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::Token(span, Token::Comma).into());
+                        tokens.push(TokenTree::token(token::Comma, span).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
@@ -558,10 +560,10 @@ impl MetaItemKind {
         where I: Iterator<Item = TokenTree>,
     {
         let delimited = match tokens.peek().cloned() {
-            Some(TokenTree::Token(_, token::Eq)) => {
+            Some(TokenTree::Token(token)) if token == token::Eq => {
                 tokens.next();
-                return if let Some(TokenTree::Token(span, token)) = tokens.next() {
-                    Lit::from_token(&token, span).ok().map(MetaItemKind::NameValue)
+                return if let Some(TokenTree::Token(token)) = tokens.next() {
+                    Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
                 } else {
                     None
                 };
@@ -579,7 +581,7 @@ impl MetaItemKind {
             let item = NestedMetaItem::from_tokens(&mut tokens)?;
             result.push(item);
             match tokens.next() {
-                None | Some(TokenTree::Token(_, Token::Comma)) => {}
+                None | Some(TokenTree::Token(Token { kind: token::Comma, .. })) => {}
                 _ => return None,
             }
         }
@@ -605,8 +607,8 @@ impl NestedMetaItem {
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
         where I: Iterator<Item = TokenTree>,
     {
-        if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
-            if let Ok(lit) = Lit::from_token(&token, span) {
+        if let Some(TokenTree::Token(token)) = tokens.peek() {
+            if let Ok(lit) = Lit::from_token(token) {
                 tokens.next();
                 return Some(NestedMetaItem::Literal(lit));
             }
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index 0c57c23b2b5..9f01b9b9f9b 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -5,7 +5,7 @@ use crate::ast::{self, Ident, Name};
 use crate::source_map;
 use crate::ext::base::{ExtCtxt, MacEager, MacResult};
 use crate::ext::build::AstBuilder;
-use crate::parse::token;
+use crate::parse::token::{self, Token};
 use crate::ptr::P;
 use crate::symbol::kw;
 use crate::tokenstream::{TokenTree};
@@ -34,12 +34,12 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                    token_tree: &[TokenTree])
                                    -> Box<dyn MacResult+'cx> {
     let code = match (token_tree.len(), token_tree.get(0)) {
-        (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. }))) => code,
         _ => unreachable!()
     };
 
     ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
-        match diagnostics.get_mut(&code.name) {
+        match diagnostics.get_mut(&code) {
             // Previously used errors.
             Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => {
                 ecx.struct_span_warn(span, &format!(
@@ -72,12 +72,14 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
         token_tree.get(1),
         token_tree.get(2)
     ) {
-        (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), None, None) => {
             (code, None)
         },
-        (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
-            Some(&TokenTree::Token(_, token::Comma)),
-            Some(&TokenTree::Token(_, token::Literal(token::Lit { symbol, .. })))) => {
+        (3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })),
+            Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
+            Some(&TokenTree::Token(Token {
+                kind: token::Literal(token::Lit { symbol, .. }), ..
+            }))) => {
             (code, Some(symbol))
         }
         _ => unreachable!()
@@ -112,7 +114,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
             description,
             use_site: None
         };
-        if diagnostics.insert(code.name, info).is_some() {
+        if diagnostics.insert(code, info).is_some() {
             ecx.span_err(span, &format!(
                 "diagnostic code {} already registered", code
             ));
@@ -140,13 +142,13 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                           token_tree: &[TokenTree])
                                           -> Box<dyn MacResult+'cx> {
     assert_eq!(token_tree.len(), 3);
-    let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
+    let (crate_name, ident) = match (&token_tree[0], &token_tree[2]) {
         (
             // Crate name.
-            &TokenTree::Token(_, token::Ident(ref crate_name, _)),
+            &TokenTree::Token(Token { kind: token::Ident(crate_name, _), .. }),
             // DIAGNOSTICS ident.
-            &TokenTree::Token(_, token::Ident(ref name, _))
-        ) => (*&crate_name, name),
+            &TokenTree::Token(Token { kind: token::Ident(name, _), span })
+        ) => (crate_name, Ident::new(name, span)),
         _ => unreachable!()
     };
 
@@ -209,7 +211,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
 
     MacEager::items(smallvec![
         P(ast::Item {
-            ident: *name,
+            ident,
             attrs: Vec::new(),
             id: ast::DUMMY_NODE_ID,
             node: ast::ItemKind::Const(
diff --git a/src/libsyntax/early_buffered_lints.rs b/src/libsyntax/early_buffered_lints.rs
index 977e6d45877..598c8459d15 100644
--- a/src/libsyntax/early_buffered_lints.rs
+++ b/src/libsyntax/early_buffered_lints.rs
@@ -3,7 +3,7 @@
 //! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat
 //! redundant. Later, these types can be converted to types for use by the rest of the compiler.
 
-use crate::syntax::ast::NodeId;
+use crate::ast::NodeId;
 use syntax_pos::MultiSpan;
 
 /// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 4b5b9ff7bbe..61c736662c7 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -265,10 +265,13 @@ impl<F> TTMacroExpander for F
 
         impl MutVisitor for AvoidInterpolatedIdents {
             fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
-                if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
-                    if let token::NtIdent(ident, is_raw) = **nt {
-                        *tt = tokenstream::TokenTree::Token(ident.span,
-                                                            token::Ident(ident, is_raw));
+                if let tokenstream::TokenTree::Token(token) = tt {
+                    if let token::Interpolated(nt) = &token.kind {
+                        if let token::NtIdent(ident, is_raw) = **nt {
+                            *tt = tokenstream::TokenTree::token(
+                                token::Ident(ident.name, is_raw), ident.span
+                            );
+                        }
                     }
                 }
                 mut_visit::noop_visit_tt(tt, self)
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index c2a73b662c6..7cd847eac46 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -10,7 +10,7 @@ use crate::ext::placeholders::{placeholder, PlaceholderExpander};
 use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
 use crate::mut_visit::*;
 use crate::parse::{DirectoryOwnership, PResult, ParseSess};
-use crate::parse::token::{self, Token};
+use crate::parse::token;
 use crate::parse::parser::Parser;
 use crate::ptr::P;
 use crate::symbol::Symbol;
@@ -585,14 +585,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item {
+                let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
                     Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
                     Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
                     Annotatable::Expr(expr) => token::NtExpr(expr),
-                }))).into();
+                })), DUMMY_SP).into();
                 let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
                 let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
                 let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 7b7cf80760f..82cc9e8ac22 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -74,11 +74,11 @@ pub use NamedMatch::*;
 pub use ParseResult::*;
 use TokenTreeOrTokenTreeSlice::*;
 
-use crate::ast::Ident;
+use crate::ast::{Ident, Name};
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, Token};
+use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
 use crate::print::pprust;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{DelimSpan, TokenStream};
@@ -199,7 +199,7 @@ struct MatcherPos<'root, 'tt: 'root> {
     seq_op: Option<quoted::KleeneOp>,
 
     /// The separator if we are in a repetition.
-    sep: Option<Token>,
+    sep: Option<TokenKind>,
 
     /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
     /// before we enter the sequence.
@@ -273,7 +273,7 @@ pub enum ParseResult<T> {
     Success(T),
     /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
     /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
-    Failure(syntax_pos::Span, Token, &'static str),
+    Failure(Token, &'static str),
     /// Fatal error (malformed macro?). Abort compilation.
     Error(syntax_pos::Span, String),
 }
@@ -417,7 +417,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
 
 /// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
 /// other tokens, this is "unexpected token...".
-pub fn parse_failure_msg(tok: Token) -> String {
+pub fn parse_failure_msg(tok: TokenKind) -> String {
     match tok {
         token::Eof => "unexpected end of macro invocation".to_string(),
         _ => format!(
@@ -428,11 +428,11 @@ pub fn parse_failure_msg(tok: Token) -> String {
 }
 
 /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
-fn token_name_eq(t1: &Token, t2: &Token) -> bool {
-    if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
-        id1.name == id2.name && is_raw1 == is_raw2
-    } else if let (Some(id1), Some(id2)) = (t1.lifetime(), t2.lifetime()) {
-        id1.name == id2.name
+fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool {
+    if let (Some((name1, is_raw1)), Some((name2, is_raw2))) = (t1.ident_name(), t2.ident_name()) {
+        name1 == name2 && is_raw1 == is_raw2
+    } else if let (Some(name1), Some(name2)) = (t1.lifetime_name(), t2.lifetime_name()) {
+        name1 == name2
     } else {
         *t1 == *t2
     }
@@ -467,7 +467,6 @@ fn inner_parse_loop<'root, 'tt>(
     eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     token: &Token,
-    span: syntax_pos::Span,
 ) -> ParseResult<()> {
     // Pop items from `cur_items` until it is empty.
     while let Some(mut item) = cur_items.pop() {
@@ -510,7 +509,7 @@ fn inner_parse_loop<'root, 'tt>(
                     // Add matches from this repetition to the `matches` of `up`
                     for idx in item.match_lo..item.match_hi {
                         let sub = item.matches[idx].clone();
-                        let span = DelimSpan::from_pair(item.sp_open, span);
+                        let span = DelimSpan::from_pair(item.sp_open, token.span);
                         new_pos.push_match(idx, MatchedSeq(sub, span));
                     }
 
@@ -598,7 +597,7 @@ fn inner_parse_loop<'root, 'tt>(
                 TokenTree::MetaVarDecl(_, _, id) => {
                     // Built-in nonterminals never start with these tokens,
                     // so we can eliminate them from consideration.
-                    if may_begin_with(id.name, token) {
+                    if may_begin_with(token, id.name) {
                         bb_items.push(item);
                     }
                 }
@@ -609,7 +608,8 @@ fn inner_parse_loop<'root, 'tt>(
                 //
                 // At the beginning of the loop, if we reach the end of the delimited submatcher,
                 // we pop the stack to backtrack out of the descent.
-                seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
+                seq @ TokenTree::Delimited(..) |
+                seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => {
                     let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
                     let idx = item.idx;
                     item.stack.push(MatcherTtFrame {
@@ -621,7 +621,7 @@ fn inner_parse_loop<'root, 'tt>(
                 }
 
                 // We just matched a normal token. We can just advance the parser.
-                TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
+                TokenTree::Token(t) if token_name_eq(&t, token) => {
                     item.idx += 1;
                     next_items.push(item);
                 }
@@ -697,10 +697,9 @@ pub fn parse(
             &mut eof_items,
             &mut bb_items,
             &parser.token,
-            parser.span,
         ) {
             Success(_) => {}
-            Failure(sp, tok, t) => return Failure(sp, tok, t),
+            Failure(token, msg) => return Failure(token, msg),
             Error(sp, msg) => return Error(sp, msg),
         }
 
@@ -727,12 +726,11 @@ pub fn parse(
                 );
             } else {
                 return Failure(
-                    if parser.span.is_dummy() {
+                    Token::new(token::Eof, if parser.span.is_dummy() {
                         parser.span
                     } else {
                         sess.source_map().next_point(parser.span)
-                    },
-                    token::Eof,
+                    }),
                     "missing tokens in macro arguments",
                 );
             }
@@ -770,8 +768,7 @@ pub fn parse(
         // then there is a syntax error.
         else if bb_items.is_empty() && next_items.is_empty() {
             return Failure(
-                parser.span,
-                parser.token.clone(),
+                parser.token.take(),
                 "no rules expected this token in macro call",
             );
         }
@@ -807,10 +804,9 @@ pub fn parse(
 
 /// The token is an identifier, but not `_`.
 /// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
+fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> {
     match *token {
-        token::Ident(ident, is_raw) if ident.name != kw::Underscore =>
-            Some((ident, is_raw)),
+        token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
         _ => None,
     }
 }
@@ -819,7 +815,7 @@ fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
 ///
 /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
 /// token. Be conservative (return true) if not sure.
-fn may_begin_with(name: Symbol, token: &Token) -> bool {
+fn may_begin_with(token: &Token, name: Name) -> bool {
     /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
     fn may_be_ident(nt: &token::Nonterminal) -> bool {
         match *nt {
@@ -831,16 +827,16 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool {
     match name {
         sym::expr => token.can_begin_expr(),
         sym::ty => token.can_begin_type(),
-        sym::ident => get_macro_ident(token).is_some(),
+        sym::ident => get_macro_name(token).is_some(),
         sym::literal => token.can_begin_literal_or_bool(),
-        sym::vis => match *token {
+        sym::vis => match token.kind {
             // The follow-set of :vis + "priv" keyword + interpolated
-            Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true,
+            token::Comma | token::Ident(..) | token::Interpolated(_) => true,
             _ => token.can_begin_type(),
         },
-        sym::block => match *token {
-            Token::OpenDelim(token::Brace) => true,
-            Token::Interpolated(ref nt) => match **nt {
+        sym::block => match token.kind {
+            token::OpenDelim(token::Brace) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtItem(_)
                 | token::NtPat(_)
                 | token::NtTy(_)
@@ -852,39 +848,39 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool {
             },
             _ => false,
         },
-        sym::path | sym::meta => match *token {
-            Token::ModSep | Token::Ident(..) => true,
-            Token::Interpolated(ref nt) => match **nt {
+        sym::path | sym::meta => match token.kind {
+            token::ModSep | token::Ident(..) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtPath(_) | token::NtMeta(_) => true,
                 _ => may_be_ident(&nt),
             },
             _ => false,
         },
-        sym::pat => match *token {
-            Token::Ident(..) |               // box, ref, mut, and other identifiers (can stricten)
-            Token::OpenDelim(token::Paren) |    // tuple pattern
-            Token::OpenDelim(token::Bracket) |  // slice pattern
-            Token::BinOp(token::And) |          // reference
-            Token::BinOp(token::Minus) |        // negative literal
-            Token::AndAnd |                     // double reference
-            Token::Literal(..) |                // literal
-            Token::DotDot |                     // range pattern (future compat)
-            Token::DotDotDot |                  // range pattern (future compat)
-            Token::ModSep |                     // path
-            Token::Lt |                         // path (UFCS constant)
-            Token::BinOp(token::Shl) => true,   // path (double UFCS)
-            Token::Interpolated(ref nt) => may_be_ident(nt),
+        sym::pat => match token.kind {
+            token::Ident(..) |               // box, ref, mut, and other identifiers (can stricten)
+            token::OpenDelim(token::Paren) |    // tuple pattern
+            token::OpenDelim(token::Bracket) |  // slice pattern
+            token::BinOp(token::And) |          // reference
+            token::BinOp(token::Minus) |        // negative literal
+            token::AndAnd |                     // double reference
+            token::Literal(..) |                // literal
+            token::DotDot |                     // range pattern (future compat)
+            token::DotDotDot |                  // range pattern (future compat)
+            token::ModSep |                     // path
+            token::Lt |                         // path (UFCS constant)
+            token::BinOp(token::Shl) => true,   // path (double UFCS)
+            token::Interpolated(ref nt) => may_be_ident(nt),
             _ => false,
         },
-        sym::lifetime => match *token {
-            Token::Lifetime(_) => true,
-            Token::Interpolated(ref nt) => match **nt {
+        sym::lifetime => match token.kind {
+            token::Lifetime(_) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtLifetime(_) | token::NtTT(_) => true,
                 _ => false,
             },
             _ => false,
         },
-        _ => match *token {
+        _ => match token.kind {
             token::CloseDelim(_) => false,
             _ => true,
         },
@@ -930,10 +926,10 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal {
         sym::literal => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())),
         sym::ty => token::NtTy(panictry!(p.parse_ty())),
         // this could be handled like a token, since it is one
-        sym::ident => if let Some((ident, is_raw)) = get_macro_ident(&p.token) {
+        sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) {
             let span = p.span;
             p.bump();
-            token::NtIdent(Ident::new(ident.name, span), is_raw)
+            token::NtIdent(Ident::new(name, span), is_raw)
         } else {
             let token_str = pprust::token_to_string(&p.token);
             p.fatal(&format!("expected ident, found {}", &token_str)).emit();
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 285c88357a6..7ab51c1eb20 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -11,8 +11,8 @@ use crate::ext::tt::transcribe::transcribe;
 use crate::feature_gate::Features;
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::Parser;
-use crate::parse::token::{self, NtTT};
-use crate::parse::token::Token::*;
+use crate::parse::token::{self, Token, NtTT};
+use crate::parse::token::TokenKind::*;
 use crate::symbol::{Symbol, kw, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
 
@@ -130,9 +130,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
     }
 
     // Which arm's failure should we report? (the one furthest along)
-    let mut best_fail_spot = DUMMY_SP;
-    let mut best_fail_tok = None;
-    let mut best_fail_text = None;
+    let mut best_failure: Option<(Token, &str)> = None;
 
     for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         let lhs_tt = match *lhs {
@@ -190,21 +188,20 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
                     arm_span,
                 })
             }
-            Failure(sp, tok, t) => if sp.lo() >= best_fail_spot.lo() {
-                best_fail_spot = sp;
-                best_fail_tok = Some(tok);
-                best_fail_text = Some(t);
-            },
+            Failure(token, msg) => match best_failure {
+                Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {}
+                _ => best_failure = Some((token, msg))
+            }
             Error(err_sp, ref msg) => {
                 cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..])
             }
         }
     }
 
-    let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers"));
-    let span = best_fail_spot.substitute_dummy(sp);
-    let mut err = cx.struct_span_err(span, &best_fail_msg);
-    err.span_label(span, best_fail_text.unwrap_or(&best_fail_msg));
+    let (token, label) = best_failure.expect("ran no matchers");
+    let span = token.span.substitute_dummy(sp);
+    let mut err = cx.struct_span_err(span, &parse_failure_msg(token.kind));
+    err.span_label(span, label);
     if let Some(sp) = def_span {
         if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
             err.span_label(cx.source_map().def_span(sp), "when calling this macro");
@@ -270,7 +267,7 @@ pub fn compile(
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
             ],
             separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +276,7 @@ pub fn compile(
         })),
         // to phase into semicolon-termination instead of semicolon-separation
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
+            tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -288,11 +285,11 @@ pub fn compile(
 
     let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
         Success(m) => m,
-        Failure(sp, tok, t) => {
-            let s = parse_failure_msg(tok);
-            let sp = sp.substitute_dummy(def.span);
+        Failure(token, msg) => {
+            let s = parse_failure_msg(token.kind);
+            let sp = token.span.substitute_dummy(def.span);
             let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
-            err.span_label(sp, t);
+            err.span_label(sp, msg);
             err.emit();
             FatalError.raise();
         }
@@ -613,7 +610,7 @@ impl FirstSets {
 
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -663,7 +660,7 @@ impl FirstSets {
 
                             if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                             subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                                first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
                             }
 
                             assert!(first.maybe_empty);
@@ -869,7 +866,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
+                    new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
                     &new
                 } else {
                     &suffix_first
@@ -1015,7 +1012,7 @@ enum IsInFollow {
 fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
     use quoted::TokenTree;
 
-    if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
+    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
         IsInFollow::Yes
@@ -1033,8 +1030,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             },
             "stmt" | "expr"  => {
                 let tokens = vec!["`=>`", "`,`", "`;`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Semi => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
@@ -1043,11 +1040,10 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             },
             "pat" => {
                 let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == kw::If ||
-                                           i.name == kw::In => IsInFollow::Yes,
+                        Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
                     _ => IsInFollow::No(tokens),
@@ -1058,14 +1054,14 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                     "`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`",
                     "`where`",
                 ];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         OpenDelim(token::DelimToken::Brace) |
                         OpenDelim(token::DelimToken::Bracket) |
                         Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
                         BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == kw::As ||
-                                           i.name == kw::Where => IsInFollow::Yes,
+                        Ident(name, false) if name == kw::As ||
+                                              name == kw::Where => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
                     TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block =>
@@ -1089,12 +1085,11 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             "vis" => {
                 // Explicitly disallow `priv`, on the off chance it comes back.
                 let tokens = vec!["`,`", "an ident", "a type"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         Comma => IsInFollow::Yes,
-                        Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
-                            IsInFollow::Yes,
-                        ref tok => if tok.can_begin_type() {
+                        Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
+                        _ => if token.can_begin_type() {
                             IsInFollow::Yes
                         } else {
                             IsInFollow::No(tokens)
@@ -1150,7 +1145,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess,
 
 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok),
+        quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(token),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
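
For readers following along, a minimal sketch (not code from this commit; `report_parse_failure` is a hypothetical name) of what the new two-field `Failure` variant means for a caller: the span is no longer a separate field, it lives inside the `Token` itself.

```rust
// Sketch only, assuming the libsyntax-internal `ParseSess`, `Token`, and
// `parse_failure_msg` are in scope, as in the hunk above.
fn report_parse_failure(sess: &ParseSess, def_span: Span, token: Token, msg: &str) -> ! {
    let s = parse_failure_msg(token.kind);
    // No separate span argument: `token.span` replaces the old `sp` field.
    let sp = token.span.substitute_dummy(def_span);
    let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
    err.span_label(sp, msg);
    err.emit();
    FatalError.raise()
}
```
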
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index a029c654659..ec7d7f705d8 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -2,7 +2,8 @@ use crate::ast::NodeId;
 use crate::early_buffered_lints::BufferedEarlyLintId;
 use crate::ext::tt::macro_parser;
 use crate::feature_gate::Features;
-use crate::parse::{token, ParseSess};
+use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::ParseSess;
 use crate::print::pprust;
 use crate::tokenstream::{self, DelimSpan};
 use crate::ast;
@@ -23,12 +24,12 @@ pub struct Delimited {
 
 impl Delimited {
     /// Returns the opening delimiter (possibly `NoDelim`).
-    pub fn open_token(&self) -> token::Token {
+    pub fn open_token(&self) -> TokenKind {
         token::OpenDelim(self.delim)
     }
 
     /// Returns the closing delimiter (possibly `NoDelim`).
-    pub fn close_token(&self) -> token::Token {
+    pub fn close_token(&self) -> TokenKind {
         token::CloseDelim(self.delim)
     }
 
@@ -39,7 +40,7 @@ impl Delimited {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(open_span, self.open_token())
+        TokenTree::token(self.open_token(), open_span)
     }
 
     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -49,7 +50,7 @@ impl Delimited {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(close_span, self.close_token())
+        TokenTree::token(self.close_token(), close_span)
     }
 }
 
@@ -58,7 +59,7 @@ pub struct SequenceRepetition {
     /// The sequence of token trees
     pub tts: Vec<TokenTree>,
     /// The optional separator
-    pub separator: Option<token::Token>,
+    pub separator: Option<TokenKind>,
     /// Whether the sequence can be repeated zero (*), or one or more times (+)
     pub op: KleeneOp,
     /// The number of `Match`s that appear in the sequence (and subsequences)
@@ -81,7 +82,7 @@ pub enum KleeneOp {
 /// are "first-class" token trees. Useful for parsing macros.
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
-    Token(Span, token::Token),
+    Token(Token),
     Delimited(DelimSpan, Lrc<Delimited>),
     /// A kleene-style repetition sequence
     Sequence(DelimSpan, Lrc<SequenceRepetition>),
@@ -144,13 +145,17 @@ impl TokenTree {
     /// Retrieves the `TokenTree`'s span.
     pub fn span(&self) -> Span {
         match *self {
-            TokenTree::Token(sp, _)
-            | TokenTree::MetaVar(sp, _)
-            | TokenTree::MetaVarDecl(sp, _, _) => sp,
-            TokenTree::Delimited(sp, _)
-            | TokenTree::Sequence(sp, _) => sp.entire(),
+            TokenTree::Token(Token { span, .. })
+            | TokenTree::MetaVar(span, _)
+            | TokenTree::MetaVarDecl(span, _, _) => span,
+            TokenTree::Delimited(span, _)
+            | TokenTree::Sequence(span, _) => span.entire(),
         }
     }
+
+    crate fn token(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span))
+    }
 }
 
 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
@@ -205,20 +210,21 @@ pub fn parse(
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
-                        Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
-                            Some((kind, _)) => {
-                                let span = end_sp.with_lo(start_sp.lo());
-                                result.push(TokenTree::MetaVarDecl(span, ident, kind));
-                                continue;
-                            }
-                            _ => end_sp,
+                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) =>
+                        match trees.next() {
+                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
+                                Some((kind, _)) => {
+                                    let span = token.span.with_lo(start_sp.lo());
+                                    result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                                    continue;
+                                }
+                                _ => token.span,
+                            },
+                            tree => tree
+                                .as_ref()
+                                .map(tokenstream::TokenTree::span)
+                                .unwrap_or(span),
                         },
-                        tree => tree
-                            .as_ref()
-                            .map(tokenstream::TokenTree::span)
-                            .unwrap_or(span),
-                    },
                     tree => tree
                         .as_ref()
                         .map(tokenstream::TokenTree::span)
@@ -270,7 +276,7 @@ where
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
         // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
+        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
             // `tree` is followed by a delimited set of token trees. This indicates the beginning
             // of a repetition sequence in the macro (e.g. `$(pat)*`).
             Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
@@ -316,33 +322,32 @@ where
 
             // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
             // metavariable that names the crate of the invocation.
-            Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
+            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                 let (ident, is_raw) = token.ident().unwrap();
-                let span = ident_span.with_lo(span.lo());
+                let span = ident.span.with_lo(span.lo());
                 if ident.name == kw::Crate && !is_raw {
-                    let ident = ast::Ident::new(kw::DollarCrate, ident.span);
-                    TokenTree::Token(span, token::Ident(ident, is_raw))
+                    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                 } else {
                     TokenTree::MetaVar(span, ident)
                 }
             }
 
             // `tree` is followed by a random token. This is an error.
-            Some(tokenstream::TokenTree::Token(span, tok)) => {
+            Some(tokenstream::TokenTree::Token(token)) => {
                 let msg = format!(
                     "expected identifier, found `{}`",
-                    pprust::token_to_string(&tok)
+                    pprust::token_to_string(&token),
                 );
-                sess.span_diagnostic.span_err(span, &msg);
-                TokenTree::MetaVar(span, ast::Ident::invalid())
+                sess.span_diagnostic.span_err(token.span, &msg);
+                TokenTree::MetaVar(token.span, ast::Ident::invalid())
             }
 
             // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::Token(span, token::Dollar),
+            None => TokenTree::token(token::Dollar, span),
         },
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -366,7 +371,7 @@ where
 
-/// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
-/// `None`.
+/// Takes a token kind and returns `Some(KleeneOp)` if it is `+`, `*`, or `?`; otherwise,
+/// returns `None`.
-fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
+fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
     match *token {
         token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
         token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
@@ -380,17 +385,14 @@ fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
-/// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
+/// - Ok(Err(token)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<I>(
-    input: &mut I,
-    span: Span,
-) -> Result<Result<(KleeneOp, Span), (token::Token, Span)>, Span>
+fn parse_kleene_op<I>(input: &mut I, span: Span) -> Result<Result<(KleeneOp, Span), Token>, Span>
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
     match input.next() {
-        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
-            Some(op) => Ok(Ok((op, span))),
-            None => Ok(Err((tok, span))),
+        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
+            Some(op) => Ok(Ok((op, token.span))),
+            None => Ok(Err(token)),
         },
         tree => Err(tree
             .as_ref()
@@ -422,7 +424,7 @@ fn parse_sep_and_kleene_op<I>(
     attrs: &[ast::Attribute],
     edition: Edition,
     macro_node_id: NodeId,
-) -> (Option<token::Token>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -447,7 +449,7 @@ fn parse_sep_and_kleene_op_2015<I>(
     _features: &Features,
     _attrs: &[ast::Attribute],
     macro_node_id: NodeId,
-) -> (Option<token::Token>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -466,7 +468,7 @@ where
             assert_eq!(op, KleeneOp::ZeroOrOne);
 
-            // Lookahead at #2. If it is a KleenOp, then #1 is a separator.
+            // Lookahead at #2. If it is a KleeneOp, then #1 is a separator.
-            let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
+            let is_1_sep = if let Some(tokenstream::TokenTree::Token(tok2)) = input.peek() {
                 kleene_op(tok2).is_some()
             } else {
                 false
@@ -504,7 +506,7 @@ where
                     }
 
                     // #2 is a random token (this is an error) :(
-                    Ok(Err((_, _))) => op1_span,
+                    Ok(Err(_)) => op1_span,
 
                     // #2 is not even a token at all :(
                     Err(_) => op1_span,
@@ -524,7 +526,7 @@ where
         }
 
         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
             // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition,
             // but is allowed in the 2018 edition
             Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
@@ -539,10 +541,10 @@ where
             }
 
             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
+            Ok(Ok((op, _))) => return (Some(token.kind), op),
 
             // #2 is a random token :(
-            Ok(Err((_, span))) => span,
+            Ok(Err(token)) => token.span,
 
             // #2 is not a token at all :(
             Err(span) => span,
@@ -565,7 +567,7 @@ fn parse_sep_and_kleene_op_2018<I>(
     sess: &ParseSess,
     _features: &Features,
     _attrs: &[ast::Attribute],
-) -> (Option<token::Token>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -580,12 +582,12 @@ where
         Ok(Ok((op, _))) => return (None, op),
 
         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
             // #2 is the `?` Kleene op, which does not take a separator (error)
             Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => {
                 // Error!
                 sess.span_diagnostic.span_err(
-                    span,
+                    token.span,
                     "the `?` macro repetition operator does not take a separator",
                 );
 
@@ -594,10 +596,10 @@ where
             }
 
             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
+            Ok(Ok((op, _))) => return (Some(token.kind), op),
 
             // #2 is a random token :(
-            Ok(Err((_, span))) => span,
+            Ok(Err(token)) => token.span,
 
             // #2 is not a token at all :(
             Err(span) => span,
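
A quick sketch of the new construction path (`dollar` is a hypothetical helper): `quoted::TokenTree::token` simply wraps a full `Token`, and since it is `crate`-visible this shape only applies inside libsyntax.

```rust
// Sketch: building the `$` token tree with the new helper.
fn dollar(span: Span) -> TokenTree {
    // Equivalent to the old two-field form `TokenTree::Token(span, token::Dollar)`.
    TokenTree::token(token::Dollar, span)
}
```
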
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index e6b49e61937..90a9cc8f34d 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -4,7 +4,7 @@ use crate::ext::expand::Marker;
 use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
 use crate::ext::tt::quoted;
 use crate::mut_visit::noop_visit_tt;
-use crate::parse::token::{self, NtTT, Token};
+use crate::parse::token::{self, NtTT, TokenKind};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 
 use smallvec::{smallvec, SmallVec};
@@ -18,7 +18,7 @@ use std::rc::Rc;
 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame {
     Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
-    Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
+    Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<TokenKind> },
 }
 
 impl Frame {
@@ -119,7 +119,7 @@ pub fn transcribe(
                             Some((tt, _)) => tt.span(),
                             None => DUMMY_SP,
                         };
-                        result.push(TokenTree::Token(prev_span, sep).into());
+                        result.push(TokenTree::token(sep, prev_span).into());
                     }
                     continue;
                 }
@@ -225,7 +225,7 @@ pub fn transcribe(
                             result.push(tt.clone().into());
                         } else {
                             sp = sp.apply_mark(cx.current_expansion.mark);
-                            let token = TokenTree::Token(sp, Token::Interpolated(nt.clone()));
+                            let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
                             result.push(token.into());
                         }
                     } else {
@@ -241,8 +241,8 @@ pub fn transcribe(
                     let ident =
                         Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    result.push(TokenTree::Token(sp, token::Dollar).into());
-                    result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into());
+                    result.push(TokenTree::token(token::Dollar, sp).into());
+                    result.push(TokenTree::token(TokenKind::from_ast_ident(ident), sp).into());
                 }
             }
 
@@ -259,9 +259,9 @@ pub fn transcribe(
 
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
-            quoted::TokenTree::Token(sp, tok) => {
+            quoted::TokenTree::Token(token) => {
                 let mut marker = Marker(cx.current_expansion.mark);
-                let mut tt = TokenTree::Token(sp, tok);
+                let mut tt = TokenTree::Token(token);
                 noop_visit_tt(&mut tt, &mut marker);
                 result.push(tt.into());
             }
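
The separator also shows why a spanless `TokenKind` still exists after this PR: one kind is shared across every repetition and only picks up a span at the emission site. A sketch under that reading (`push_separator` is hypothetical):

```rust
// Sketch: emitting a repetition separator. `sep` is a bare `TokenKind`; the
// span is chosen per emission, here the span of the previously emitted tree.
fn push_separator(result: &mut Vec<TreeAndJoint>, sep: TokenKind, prev_span: Span) {
    result.push(TokenTree::token(sep, prev_span).into());
}
```
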
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index 4a95b6f69a1..64415204047 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -1958,9 +1958,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                 name,
                 template
             ),
-            None => if let Some(TokenTree::Token(_, token::Eq)) = attr.tokens.trees().next() {
-                // All key-value attributes are restricted to meta-item syntax.
-                attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+            None => if let Some(TokenTree::Token(token)) = attr.tokens.trees().next() {
+                if token == token::Eq {
+                    // All key-value attributes are restricted to meta-item syntax.
+                    attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+                }
             }
         }
     }
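
Note the `token == token::Eq` guard above compares a whole `Token` against a bare `TokenKind`. A sketch of the span-insensitive impl this presumably relies on (assumed to live in `parse/token.rs` in this PR; not shown in this section):

```rust
// Sketch, not necessarily the exact impl: equality against a kind
// ignores the token's span.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, rhs: &TokenKind) -> bool {
        self.kind == *rhs
    }
}
```
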
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index 4229121b3d0..c69364d4e19 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -10,6 +10,7 @@
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
 
+#![feature(bind_by_move_pattern_guards)]
 #![feature(crate_visibility_modifier)]
 #![feature(label_break_value)]
 #![feature(nll)]
@@ -136,12 +137,6 @@ pub mod util {
 
 pub mod json;
 
-pub mod syntax {
-    pub use crate::ext;
-    pub use crate::parse;
-    pub use crate::ast;
-}
-
 pub mod ast;
 pub mod attr;
 pub mod source_map;
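
The new `bind_by_move_pattern_guards` feature gate is presumably needed for match arms introduced by this PR that bind a `Token` by value and then inspect it in the guard; a sketch of that shape (`first_ident` is a hypothetical helper):

```rust
// Sketch: `token` is moved out of the tree by the pattern while the guard
// still borrows it -- the pattern `quoted.rs` uses for `$ ident`.
fn first_ident(mut trees: impl Iterator<Item = tokenstream::TokenTree>) -> Option<Token> {
    match trees.next() {
        Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => Some(token),
        _ => None,
    }
}
```
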
diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs
index fb1a7a680ba..d2a614c4a54 100644
--- a/src/libsyntax/mut_visit.rs
+++ b/src/libsyntax/mut_visit.rs
@@ -576,9 +576,8 @@ pub fn noop_visit_arg<T: MutVisitor>(Arg { id, pat, ty }: &mut Arg, vis: &mut T)
 
 pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
     match tt {
-        TokenTree::Token(span, tok) => {
-            vis.visit_span(span);
-            vis.visit_token(tok);
+        TokenTree::Token(token) => {
+            vis.visit_token(token);
         }
         TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
             vis.visit_span(open);
@@ -595,17 +594,26 @@ pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &m
     })
 }
 
-// apply ident visitor if it's an ident, apply other visits to interpolated nodes
+// Apply ident visitor if it's an ident, apply other visits to interpolated nodes.
+// In practice the ident part is not actually used by specific visitors right now,
+// but there's a test below checking that it works.
 pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
-    match t {
-        token::Ident(id, _is_raw) => vis.visit_ident(id),
-        token::Lifetime(id) => vis.visit_ident(id),
+    let Token { kind, span } = t;
+    match kind {
+        token::Ident(name, _) | token::Lifetime(name) => {
+            let mut ident = Ident::new(*name, *span);
+            vis.visit_ident(&mut ident);
+            *name = ident.name;
+            *span = ident.span;
+            return; // avoid visiting the span a second time
+        }
         token::Interpolated(nt) => {
             let mut nt = Lrc::make_mut(nt);
             vis.visit_interpolated(&mut nt);
         }
         _ => {}
     }
+    vis.visit_span(span);
 }
 
 /// Apply visitor to elements of interpolated nodes.
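
A toy sketch (hypothetical, not part of this commit) of what the new ident path enables: a visitor that rewrites identifier tokens sees both halves of the token, name and span, round-trip through a single `visit_ident` call.

```rust
// Sketch: a mut visitor that uppercases identifier tokens. With the new
// `noop_visit_token`, the rewritten name and span are written back into
// the token's `kind` and `span` fields.
struct Upcaser;

impl MutVisitor for Upcaser {
    fn visit_ident(&mut self, ident: &mut Ident) {
        *ident = Ident::new(Name::intern(&ident.name.as_str().to_uppercase()), ident.span);
    }
}
```
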
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index e99a86e807f..d83b76f4d23 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -24,7 +24,7 @@ impl<'a> Parser<'a> {
         let mut just_parsed_doc_comment = false;
         loop {
             debug!("parse_outer_attributes: self.token={:?}", self.token);
-            match self.token {
+            match self.token.kind {
                 token::Pound => {
                     let inner_error_reason = if just_parsed_doc_comment {
                         "an inner attribute is not permitted following an outer doc comment"
@@ -81,7 +81,7 @@ impl<'a> Parser<'a> {
         debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
                inner_parse_policy,
                self.token);
-        let (span, path, tokens, style) = match self.token {
+        let (span, path, tokens, style) = match self.token.kind {
             token::Pound => {
                 let lo = self.span;
                 self.bump();
@@ -140,7 +140,7 @@ impl<'a> Parser<'a> {
     /// PATH `=` TOKEN_TREE
     /// The delimiters or `=` are still put into the resulting token stream.
     crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
-        let meta = match self.token {
+        let meta = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
                 _ => None,
@@ -157,9 +157,9 @@ impl<'a> Parser<'a> {
                self.check(&token::OpenDelim(DelimToken::Brace)) {
                    self.parse_token_tree().into()
             } else if self.eat(&token::Eq) {
-                let eq = TokenTree::Token(self.prev_span, token::Eq);
+                let eq = TokenTree::token(token::Eq, self.prev_span);
                 let mut is_interpolated_expr = false;
-                if let token::Interpolated(nt) = &self.token {
+                if let token::Interpolated(nt) = &self.token.kind {
                     if let token::NtExpr(..) = **nt {
                         is_interpolated_expr = true;
                     }
@@ -188,7 +188,7 @@ impl<'a> Parser<'a> {
     crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
         let mut attrs: Vec<ast::Attribute> = vec![];
         loop {
-            match self.token {
+            match self.token.kind {
                 token::Pound => {
                     // Don't even try to parse if it's not an inner attribute.
                     if !self.look_ahead(1, |t| t == &token::Not) {
@@ -236,7 +236,7 @@ impl<'a> Parser<'a> {
     /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
-        let nt_meta = match self.token {
+        let nt_meta = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref e) => Some(e.clone()),
                 _ => None,
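
The attribute parser shows the mechanical half of the migration: `self.token` is now the full `Token`, so kind checks gain a `.kind` projection, while span users can reach `self.token.span` directly. A sketch of the resulting idiom (`at_inner_attribute` is a hypothetical helper reusing the lookahead closure from the hunk above):

```rust
// Sketch: checking for `#!` in the new style. The kind lives behind `.kind`;
// the lookahead closure compares a token against a bare kind, as above.
fn at_inner_attribute(parser: &Parser<'_>) -> bool {
    parser.token.kind == token::Pound && parser.look_ahead(1, |t| t == &token::Not)
}
```
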
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index 5df22f28797..7f0bf4a9050 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -2,8 +2,9 @@ use crate::ast::{
     self, Arg, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
     Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
 };
-use crate::parse::{SeqSep, token, PResult, Parser};
+use crate::parse::{SeqSep, PResult, Parser};
 use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
+use crate::parse::token::{self, TokenKind};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::Spanned;
@@ -201,12 +202,12 @@ impl<'a> Parser<'a> {
             self.span,
             &format!("expected identifier, found {}", self.this_token_descr()),
         );
-        if let token::Ident(ident, false) = &self.token {
-            if ident.is_raw_guess() {
+        if let token::Ident(name, false) = self.token.kind {
+            if Ident::new(name, self.span).is_raw_guess() {
                 err.span_suggestion(
                     self.span,
                     "you can escape reserved keywords to use them as identifiers",
-                    format!("r#{}", ident),
+                    format!("r#{}", name),
                     Applicability::MaybeIncorrect,
                 );
             }
@@ -229,8 +230,8 @@ impl<'a> Parser<'a> {
 
     pub fn expected_one_of_not_found(
         &mut self,
-        edible: &[token::Token],
-        inedible: &[token::Token],
+        edible: &[TokenKind],
+        inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
@@ -294,7 +295,7 @@ impl<'a> Parser<'a> {
                 Applicability::MaybeIncorrect,
             );
         }
-        let sp = if self.token == token::Token::Eof {
+        let sp = if self.token == token::Eof {
             // This is EOF, don't want to point at the following char, but rather the last token
             self.prev_span
         } else {
@@ -368,7 +369,7 @@ impl<'a> Parser<'a> {
 
     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    crate fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
+    crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
         let handler = self.diagnostic();
 
         if let Err(ref mut err) = self.parse_seq_to_before_tokens(
@@ -388,7 +389,7 @@ impl<'a> Parser<'a> {
     /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
     ///                                                        ^^ help: remove extra angle brackets
     /// ```
-    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) {
+    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
         // This function is intended to be invoked after parsing a path segment where there are two
         // cases:
         //
@@ -726,13 +727,13 @@ impl<'a> Parser<'a> {
     /// closing delimiter.
     pub fn unexpected_try_recover(
         &mut self,
-        t: &token::Token,
+        t: &TokenKind,
     ) -> PResult<'a, bool /* recovered */> {
         let token_str = pprust::token_to_string(t);
         let this_token_str = self.this_token_descr();
-        let (prev_sp, sp) = match (&self.token, self.subparser_name) {
+        let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
             // Point at the end of the macro call when reaching end of macro arguments.
-            (token::Token::Eof, Some(_)) => {
+            (token::Eof, Some(_)) => {
                 let sp = self.sess.source_map().next_point(self.span);
                 (sp, sp)
             }
@@ -740,14 +741,14 @@ impl<'a> Parser<'a> {
             // This happens when the parser finds an empty TokenStream.
             _ if self.prev_span == DUMMY_SP => (self.span, self.span),
             // EOF, don't want to point at the following char, but rather the last token.
-            (token::Token::Eof, None) => (self.prev_span, self.span),
+            (token::Eof, None) => (self.prev_span, self.span),
             _ => (self.sess.source_map().next_point(self.prev_span), self.span),
         };
         let msg = format!(
             "expected `{}`, found {}",
             token_str,
-            match (&self.token, self.subparser_name) {
-                (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
+            match (&self.token.kind, self.subparser_name) {
+                (token::Eof, Some(origin)) => format!("end of {}", origin),
                 _ => this_token_str,
             },
         );
@@ -903,7 +904,7 @@ impl<'a> Parser<'a> {
 
     crate fn recover_closing_delimiter(
         &mut self,
-        tokens: &[token::Token],
+        tokens: &[TokenKind],
         mut err: DiagnosticBuilder<'a>,
     ) -> PResult<'a, bool> {
         let mut pos = None;
@@ -989,7 +990,7 @@ impl<'a> Parser<'a> {
                break_on_semi, break_on_block);
         loop {
             debug!("recover_stmt_ loop {:?}", self.token);
-            match self.token {
+            match self.token.kind {
                 token::OpenDelim(token::DelimToken::Brace) => {
                     brace_depth += 1;
                     self.bump();
@@ -1074,7 +1075,7 @@ impl<'a> Parser<'a> {
     }
 
     crate fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
-        if let token::DocComment(_) = self.token {
+        if let token::DocComment(_) = self.token.kind {
             let mut err = self.diagnostic().struct_span_err(
                 self.span,
                 &format!("documentation comments cannot be applied to {}", applied_to),
@@ -1214,8 +1215,8 @@ impl<'a> Parser<'a> {
     }
 
     crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
-        let (span, msg) = match (&self.token, self.subparser_name) {
-            (&token::Token::Eof, Some(origin)) => {
+        let (span, msg) = match (&self.token.kind, self.subparser_name) {
+            (&token::Eof, Some(origin)) => {
                 let sp = self.sess.source_map().next_point(self.span);
                 (sp, format!("expected expression, found end of {}", origin))
             }
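
Since `token::Ident` now carries only a `Name`, span-dependent checks rebuild the `Ident` on demand, as the raw-keyword suggestion above does. The same pattern in isolation (`raw_ident_suggestion` is a hypothetical helper):

```rust
// Sketch: suggesting `r#` escaping for a reserved word, given name-only
// ident tokens. The `Ident` is reconstructed from name + current span.
fn raw_ident_suggestion(name: ast::Name, span: Span) -> Option<String> {
    if Ident::new(name, span).is_raw_guess() {
        Some(format!("r#{}", name))
    } else {
        None
    }
}
```
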
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index a06a84f162a..e3d959c2c54 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1,6 +1,6 @@
-use crate::ast::{self, Ident};
+use crate::ast;
 use crate::parse::ParseSess;
-use crate::parse::token::{self, Token};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::symbol::{sym, Symbol};
 use crate::parse::unescape;
 use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
@@ -12,7 +12,6 @@ use core::unicode::property::Pattern_White_Space;
 use std::borrow::Cow;
 use std::char;
 use std::iter;
-use std::mem::replace;
 use rustc_data_structures::sync::Lrc;
 use log::debug;
 
@@ -21,21 +20,6 @@ mod tokentrees;
 mod unicode_chars;
 
 #[derive(Clone, Debug)]
-pub struct TokenAndSpan {
-    pub tok: Token,
-    pub sp: Span,
-}
-
-impl Default for TokenAndSpan {
-    fn default() -> Self {
-        TokenAndSpan {
-            tok: token::Whitespace,
-            sp: syntax_pos::DUMMY_SP,
-        }
-    }
-}
-
-#[derive(Clone, Debug)]
 pub struct UnmatchedBrace {
     pub expected_delim: token::DelimToken,
     pub found_delim: token::DelimToken,
@@ -56,8 +40,7 @@ pub struct StringReader<'a> {
     /// Stop reading src at this index.
     crate end_src_index: usize,
     // cached:
-    peek_tok: Token,
-    peek_span: Span,
+    peek_token: Token,
     peek_span_src_raw: Span,
     fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
@@ -78,16 +61,7 @@ impl<'a> StringReader<'a> {
         (real, raw)
     }
 
-    fn mk_ident(&self, string: &str) -> Ident {
-        let mut ident = Ident::from_str(string);
-        if let Some(span) = self.override_span {
-            ident.span = span;
-        }
-
-        ident
-    }
-
-    fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan {
+    fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
         match res {
             Ok(tok) => tok,
             Err(_) => {
@@ -97,18 +71,15 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
+    fn next_token(&mut self) -> Token where Self: Sized {
         let res = self.try_next_token();
         self.unwrap_or_abort(res)
     }
 
     /// Returns the next token. EFFECT: advances the string_reader.
-    pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
+    pub fn try_next_token(&mut self) -> Result<Token, ()> {
         assert!(self.fatal_errs.is_empty());
-        let ret_val = TokenAndSpan {
-            tok: replace(&mut self.peek_tok, token::Whitespace),
-            sp: self.peek_span,
-        };
+        let ret_val = self.peek_token.take();
         self.advance_token()?;
         Ok(ret_val)
     }
@@ -135,10 +106,10 @@ impl<'a> StringReader<'a> {
         return None;
     }
 
-    fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> {
+    fn try_real_token(&mut self) -> Result<Token, ()> {
         let mut t = self.try_next_token()?;
         loop {
-            match t.tok {
+            match t.kind {
                 token::Whitespace | token::Comment | token::Shebang(_) => {
                     t = self.try_next_token()?;
                 }
@@ -149,7 +120,7 @@ impl<'a> StringReader<'a> {
         Ok(t)
     }
 
-    pub fn real_token(&mut self) -> TokenAndSpan {
+    pub fn real_token(&mut self) -> Token {
         let res = self.try_real_token();
         self.unwrap_or_abort(res)
     }
@@ -173,7 +144,7 @@ impl<'a> StringReader<'a> {
     }
 
     fn fatal(&self, m: &str) -> FatalError {
-        self.fatal_span(self.peek_span, m)
+        self.fatal_span(self.peek_token.span, m)
     }
 
     crate fn emit_fatal_errors(&mut self) {
@@ -194,12 +165,8 @@ impl<'a> StringReader<'a> {
         buffer
     }
 
-    pub fn peek(&self) -> TokenAndSpan {
-        // FIXME(pcwalton): Bad copy!
-        TokenAndSpan {
-            tok: self.peek_tok.clone(),
-            sp: self.peek_span,
-        }
+    pub fn peek(&self) -> &Token {
+        &self.peek_token
     }
 
     /// For comments.rs, which hackily pokes into next_pos and ch
@@ -229,9 +196,7 @@ impl<'a> StringReader<'a> {
             ch: Some('\n'),
             source_file,
             end_src_index: src.len(),
-            // dummy values; not read
-            peek_tok: token::Eof,
-            peek_span: syntax_pos::DUMMY_SP,
+            peek_token: Token::dummy(),
             peek_span_src_raw: syntax_pos::DUMMY_SP,
             src,
             fatal_errs: Vec::new(),
@@ -336,31 +301,24 @@ impl<'a> StringReader<'a> {
         self.err_span_(from_pos, to_pos, &m[..]);
     }
 
-    /// Advance peek_tok and peek_span to refer to the next token, and
+    /// Advance peek_token to refer to the next token, and
     /// possibly update the interner.
     fn advance_token(&mut self) -> Result<(), ()> {
         match self.scan_whitespace_or_comment() {
             Some(comment) => {
-                self.peek_span_src_raw = comment.sp;
-                self.peek_span = comment.sp;
-                self.peek_tok = comment.tok;
+                self.peek_span_src_raw = comment.span;
+                self.peek_token = comment;
             }
             None => {
-                if self.is_eof() {
-                    self.peek_tok = token::Eof;
-                    let (real, raw) = self.mk_sp_and_raw(
-                        self.source_file.end_pos,
-                        self.source_file.end_pos,
-                    );
-                    self.peek_span = real;
-                    self.peek_span_src_raw = raw;
+                let (kind, start_pos, end_pos) = if self.is_eof() {
+                    (token::Eof, self.source_file.end_pos, self.source_file.end_pos)
                 } else {
-                    let start_bytepos = self.pos;
-                    self.peek_tok = self.next_token_inner()?;
-                    let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos);
-                    self.peek_span = real;
-                    self.peek_span_src_raw = raw;
+                    let start_pos = self.pos;
+                    (self.next_token_inner()?, start_pos, self.pos)
                 };
+                let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
+                self.peek_token = Token::new(kind, real);
+                self.peek_span_src_raw = raw;
             }
         }
 
@@ -527,7 +485,7 @@ impl<'a> StringReader<'a> {
 
     /// PRECONDITION: self.ch is not whitespace
     /// Eats any kind of comment.
-    fn scan_comment(&mut self) -> Option<TokenAndSpan> {
+    fn scan_comment(&mut self) -> Option<Token> {
         if let Some(c) = self.ch {
             if c.is_whitespace() {
                 let msg = "called consume_any_line_comment, but there was whitespace";
@@ -563,14 +521,14 @@ impl<'a> StringReader<'a> {
                         self.bump();
                     }
 
-                    let tok = if doc_comment {
+                    let kind = if doc_comment {
                         self.with_str_from(start_bpos, |string| {
                             token::DocComment(Symbol::intern(string))
                         })
                     } else {
                         token::Comment
                     };
-                    Some(TokenAndSpan { tok, sp: self.mk_sp(start_bpos, self.pos) })
+                    Some(Token::new(kind, self.mk_sp(start_bpos, self.pos)))
                 }
                 Some('*') => {
                     self.bump();
@@ -594,10 +552,10 @@ impl<'a> StringReader<'a> {
                     while !self.ch_is('\n') && !self.is_eof() {
                         self.bump();
                     }
-                    return Some(TokenAndSpan {
-                        tok: token::Shebang(self.name_from(start)),
-                        sp: self.mk_sp(start, self.pos),
-                    });
+                    return Some(Token::new(
+                        token::Shebang(self.name_from(start)),
+                        self.mk_sp(start, self.pos),
+                    ));
                 }
             }
             None
@@ -608,7 +566,7 @@ impl<'a> StringReader<'a> {
 
     /// If there is whitespace, shebang, or a comment, scan it. Otherwise,
     /// return `None`.
-    fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
+    fn scan_whitespace_or_comment(&mut self) -> Option<Token> {
         match self.ch.unwrap_or('\0') {
             // # to handle shebang at start of file -- this is the entry point
             // for skipping over all "junk"
@@ -622,10 +580,7 @@ impl<'a> StringReader<'a> {
                 while is_pattern_whitespace(self.ch) {
                     self.bump();
                 }
-                let c = Some(TokenAndSpan {
-                    tok: token::Whitespace,
-                    sp: self.mk_sp(start_bpos, self.pos),
-                });
+                let c = Some(Token::new(token::Whitespace, self.mk_sp(start_bpos, self.pos)));
                 debug!("scanning whitespace: {:?}", c);
                 c
             }
@@ -634,7 +589,7 @@ impl<'a> StringReader<'a> {
     }
 
     /// Might return a sugared-doc-attr
-    fn scan_block_comment(&mut self) -> Option<TokenAndSpan> {
+    fn scan_block_comment(&mut self) -> Option<Token> {
         // block comments starting with "/**" or "/*!" are doc-comments
         let is_doc_comment = self.ch_is('*') || self.ch_is('!');
         let start_bpos = self.pos - BytePos(2);
@@ -671,7 +626,7 @@ impl<'a> StringReader<'a> {
 
         self.with_str_from(start_bpos, |string| {
             // but comments with only "*"s between two "/"s are not
-            let tok = if is_block_doc_comment(string) {
+            let kind = if is_block_doc_comment(string) {
                 let string = if has_cr {
                     self.translate_crlf(start_bpos,
                                         string,
@@ -684,10 +639,7 @@ impl<'a> StringReader<'a> {
                 token::Comment
             };
 
-            Some(TokenAndSpan {
-                tok,
-                sp: self.mk_sp(start_bpos, self.pos),
-            })
+            Some(Token::new(kind, self.mk_sp(start_bpos, self.pos)))
         })
     }
 
@@ -847,7 +799,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    fn binop(&mut self, op: token::BinOpToken) -> Token {
+    fn binop(&mut self, op: token::BinOpToken) -> TokenKind {
         self.bump();
         if self.ch_is('=') {
             self.bump();
@@ -859,7 +811,7 @@ impl<'a> StringReader<'a> {
 
     /// Returns the next token from the string, advances the input past that
     /// token, and updates the interner
-    fn next_token_inner(&mut self) -> Result<Token, ()> {
+    fn next_token_inner(&mut self) -> Result<TokenKind, ()> {
         let c = self.ch;
 
         if ident_start(c) {
@@ -897,17 +849,17 @@ impl<'a> StringReader<'a> {
 
                 return Ok(self.with_str_from(start, |string| {
                     // FIXME: perform NFKC normalization here. (Issue #2253)
-                    let ident = self.mk_ident(string);
+                    let name = ast::Name::intern(string);
 
                     if is_raw_ident {
                         let span = self.mk_sp(raw_start, self.pos);
-                        if !ident.can_be_raw() {
-                            self.err_span(span, &format!("`{}` cannot be a raw identifier", ident));
+                        if !name.can_be_raw() {
+                            self.err_span(span, &format!("`{}` cannot be a raw identifier", name));
                         }
                         self.sess.raw_identifier_spans.borrow_mut().push(span);
                     }
 
-                    token::Ident(ident, is_raw_ident)
+                    token::Ident(name, is_raw_ident)
                 }));
             }
         }
@@ -916,7 +868,7 @@ impl<'a> StringReader<'a> {
             let (kind, symbol) = self.scan_number(c.unwrap());
             let suffix = self.scan_optional_raw_name();
             debug!("next_token_inner: scanned number {:?}, {:?}, {:?}", kind, symbol, suffix);
-            return Ok(Token::lit(kind, symbol, suffix));
+            return Ok(TokenKind::lit(kind, symbol, suffix));
         }
 
         match c.expect("next_token_inner called at EOF") {
@@ -1077,16 +1029,9 @@ impl<'a> StringReader<'a> {
                         let symbol = self.name_from(start);
                         self.bump();
                         self.validate_char_escape(start_with_quote);
-                        return Ok(Token::lit(token::Char, symbol, None));
+                        return Ok(TokenKind::lit(token::Char, symbol, None));
                     }
 
-                    // Include the leading `'` in the real identifier, for macro
-                    // expansion purposes. See #12512 for the gory details of why
-                    // this is necessary.
-                    let ident = self.with_str_from(start_with_quote, |lifetime_name| {
-                        self.mk_ident(lifetime_name)
-                    });
-
                     if starts_with_number {
                         // this is a recovered lifetime written `'1`, error but accept it
                         self.err_span_(
@@ -1096,13 +1041,16 @@ impl<'a> StringReader<'a> {
                         );
                     }
 
-                    return Ok(token::Lifetime(ident));
+                    // Include the leading `'` in the real identifier, for macro
+                    // expansion purposes. See #12512 for the gory details of why
+                    // this is necessary.
+                    return Ok(token::Lifetime(self.name_from(start_with_quote)));
                 }
                 let msg = "unterminated character literal";
                 let symbol = self.scan_single_quoted_string(start_with_quote, msg);
                 self.validate_char_escape(start_with_quote);
                 let suffix = self.scan_optional_raw_name();
-                Ok(Token::lit(token::Char, symbol, suffix))
+                Ok(TokenKind::lit(token::Char, symbol, suffix))
             }
             'b' => {
                 self.bump();
@@ -1127,7 +1075,7 @@ impl<'a> StringReader<'a> {
                 };
                 let suffix = self.scan_optional_raw_name();
 
-                Ok(Token::lit(kind, symbol, suffix))
+                Ok(TokenKind::lit(kind, symbol, suffix))
             }
             '"' => {
                 let start_with_quote = self.pos;
@@ -1135,7 +1083,7 @@ impl<'a> StringReader<'a> {
                 let symbol = self.scan_double_quoted_string(msg);
                 self.validate_str_escape(start_with_quote);
                 let suffix = self.scan_optional_raw_name();
-                Ok(Token::lit(token::Str, symbol, suffix))
+                Ok(TokenKind::lit(token::Str, symbol, suffix))
             }
             'r' => {
                 let start_bpos = self.pos;
@@ -1213,7 +1161,7 @@ impl<'a> StringReader<'a> {
                 };
                 let suffix = self.scan_optional_raw_name();
 
-                Ok(Token::lit(token::StrRaw(hash_count), symbol, suffix))
+                Ok(TokenKind::lit(token::StrRaw(hash_count), symbol, suffix))
             }
             '-' => {
                 if self.nextch_is('>') {
@@ -1610,27 +1558,26 @@ mod tests {
                                         &sh,
                                         "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                             .to_string());
-            let id = Ident::from_str("fn");
-            assert_eq!(string_reader.next_token().tok, token::Comment);
-            assert_eq!(string_reader.next_token().tok, token::Whitespace);
+            assert_eq!(string_reader.next_token(), token::Comment);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             let tok1 = string_reader.next_token();
-            let tok2 = TokenAndSpan {
-                tok: token::Ident(id, false),
-                sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
-            };
-            assert_eq!(tok1.tok, tok2.tok);
-            assert_eq!(tok1.sp, tok2.sp);
-            assert_eq!(string_reader.next_token().tok, token::Whitespace);
+            let tok2 = Token::new(
+                token::Ident(Symbol::intern("fn"), false),
+                Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
+            );
+            assert_eq!(tok1.kind, tok2.kind);
+            assert_eq!(tok1.span, tok2.span);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             // the 'main' id is already read:
             assert_eq!(string_reader.pos.clone(), BytePos(28));
             // read another token:
             let tok3 = string_reader.next_token();
-            let tok4 = TokenAndSpan {
-                tok: mk_ident("main"),
-                sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
-            };
-            assert_eq!(tok3.tok, tok4.tok);
-            assert_eq!(tok3.sp, tok4.sp);
+            let tok4 = Token::new(
+                mk_ident("main"),
+                Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
+            );
+            assert_eq!(tok3.kind, tok4.kind);
+            assert_eq!(tok3.span, tok4.span);
             // the lparen is already read:
             assert_eq!(string_reader.pos.clone(), BytePos(29))
         })
@@ -1638,19 +1585,19 @@ mod tests {
 
     // check that the given reader produces the desired stream
     // of tokens (stop checking after exhausting the expected vec)
-    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<Token>) {
+    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
         for expected_tok in &expected {
-            assert_eq!(&string_reader.next_token().tok, expected_tok);
+            assert_eq!(&string_reader.next_token(), expected_tok);
         }
     }
 
     // make the identifier by looking up the string in the interner
-    fn mk_ident(id: &str) -> Token {
-        Token::from_ast_ident(Ident::from_str(id))
+    fn mk_ident(id: &str) -> TokenKind {
+        TokenKind::from_ast_ident(Ident::from_str(id))
     }
 
-    fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> Token {
-        Token::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
+    fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
+        TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
     }
 
     #[test]
@@ -1698,7 +1645,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
                        mk_lit(token::Char, "a", None));
         })
     }
@@ -1708,7 +1655,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
                        mk_lit(token::Char, " ", None));
         })
     }
@@ -1718,7 +1665,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
                        mk_lit(token::Char, "\\n", None));
         })
     }
@@ -1728,8 +1675,8 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok,
-                       token::Lifetime(Ident::from_str("'abc")));
+            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
+                       token::Lifetime(Symbol::intern("'abc")));
         })
     }
 
@@ -1738,7 +1685,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
         })
     }
@@ -1750,10 +1697,10 @@ mod tests {
             let sh = mk_sess(sm.clone());
             macro_rules! test {
                 ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok,
+                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
                     // with a whitespace separator:
-                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok,
+                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, None));
                 }}
             }
@@ -1768,11 +1715,11 @@ mod tests {
             test!("1.0", Float, "1.0");
             test!("1.0e10", Float, "1.0e10");
 
-            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
                        mk_lit(token::Integer, "2", Some("us")));
-            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "raw", Some("suffix")));
-            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
         })
     }
@@ -1790,11 +1737,8 @@ mod tests {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
-            match lexer.next_token().tok {
-                token::Comment => {}
-                _ => panic!("expected a comment!"),
-            }
-            assert_eq!(lexer.next_token().tok, mk_lit(token::Char, "a", None));
+            assert_eq!(lexer.next_token(), token::Comment);
+            assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
         })
     }
 
@@ -1805,11 +1749,10 @@ mod tests {
             let sh = mk_sess(sm.clone());
             let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
             let comment = lexer.next_token();
-            assert_eq!(comment.tok, token::Comment);
-            assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));
-            assert_eq!(lexer.next_token().tok, token::Whitespace);
-            assert_eq!(lexer.next_token().tok,
-                    token::DocComment(Symbol::intern("/// test")));
+            assert_eq!(comment.kind, token::Comment);
+            assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
+            assert_eq!(lexer.next_token(), token::Whitespace);
+            assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
         })
     }
 }
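
With `TokenAndSpan` folded into `Token`, a driver loop needs only one value per step. A sketch mirroring the updated tests (`dump_tokens` is hypothetical; `real_token` skips whitespace and comments, as shown above):

```rust
// Sketch: draining a `StringReader` with the unified token type. Each step
// yields one `Token` whose `kind` and `span` used to be two separate fields.
fn dump_tokens(reader: &mut StringReader<'_>) {
    loop {
        let token = reader.real_token();
        if token == token::Eof {
            break;
        }
        println!("{:?} at {:?}", token.kind, token.span);
    }
}
```
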
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 4bfc5bb16c0..b809f99beba 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -2,15 +2,15 @@ use syntax_pos::Span;
 
 use crate::print::pprust::token_to_string;
 use crate::parse::lexer::{StringReader, UnmatchedBrace};
-use crate::parse::{token, PResult};
+use crate::parse::token::{self, Token};
+use crate::parse::PResult;
 use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
     crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
         let mut tt_reader = TokenTreesReader {
             string_reader: self,
-            token: token::Eof,
-            span: syntax_pos::DUMMY_SP,
+            token: Token::dummy(),
             open_braces: Vec::new(),
             unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
@@ -23,8 +23,7 @@ impl<'a> StringReader<'a> {
 
 struct TokenTreesReader<'a> {
     string_reader: StringReader<'a>,
-    token: token::Token,
-    span: Span,
+    token: Token,
     /// Stack of open delimiters and their spans. Used for error messages.
     open_braces: Vec<(token::DelimToken, Span)>,
     unmatched_braces: Vec<UnmatchedBrace>,
@@ -52,7 +51,7 @@ impl<'a> TokenTreesReader<'a> {
     fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
         let mut tts = vec![];
         loop {
-            if let token::CloseDelim(..) = self.token {
+            if let token::CloseDelim(..) = self.token.kind {
                 return TokenStream::new(tts);
             }
 
@@ -68,11 +67,11 @@ impl<'a> TokenTreesReader<'a> {
 
     fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
         let sm = self.string_reader.sess.source_map();
-        match self.token {
+        match self.token.kind {
             token::Eof => {
                 let msg = "this file contains an un-closed delimiter";
                 let mut err = self.string_reader.sess.span_diagnostic
-                    .struct_span_err(self.span, msg);
+                    .struct_span_err(self.token.span, msg);
                 for &(_, sp) in &self.open_braces {
                     err.span_label(sp, "un-closed delimiter");
                 }
@@ -102,10 +101,10 @@ impl<'a> TokenTreesReader<'a> {
             },
             token::OpenDelim(delim) => {
                 // The span for beginning of the delimited section
-                let pre_span = self.span;
+                let pre_span = self.token.span;
 
                 // Parse the open delimiter.
-                self.open_braces.push((delim, self.span));
+                self.open_braces.push((delim, self.token.span));
                 self.real_token();
 
                 // Parse the token trees within the delimiters.
@@ -114,9 +113,9 @@ impl<'a> TokenTreesReader<'a> {
                 let tts = self.parse_token_trees_until_close_delim();
 
                 // Expand to cover the entire delimited token tree
-                let delim_span = DelimSpan::from_pair(pre_span, self.span);
+                let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
 
-                match self.token {
+                match self.token.kind {
                     // Correct delimiter.
                     token::CloseDelim(d) if d == delim => {
                         let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
@@ -126,7 +125,7 @@ impl<'a> TokenTreesReader<'a> {
                             self.matching_delim_spans.clear();
                         } else {
                             self.matching_delim_spans.push(
-                                (open_brace, open_brace_span, self.span),
+                                (open_brace, open_brace_span, self.token.span),
                             );
                         }
                         // Parse the close delimiter.
@@ -136,16 +135,16 @@ impl<'a> TokenTreesReader<'a> {
                     token::CloseDelim(other) => {
                         let mut unclosed_delimiter = None;
                         let mut candidate = None;
-                        if self.last_unclosed_found_span != Some(self.span) {
+                        if self.last_unclosed_found_span != Some(self.token.span) {
                             // do not complain about the same unclosed delimiter multiple times
-                            self.last_unclosed_found_span = Some(self.span);
+                            self.last_unclosed_found_span = Some(self.token.span);
                             // This is a conservative error: only report the last unclosed
                             // delimiter. The previous unclosed delimiters could actually be
                             // closed! The parser just hasn't gotten to them yet.
                             if let Some(&(_, sp)) = self.open_braces.last() {
                                 unclosed_delimiter = Some(sp);
                             };
-                            if let Some(current_padding) = sm.span_to_margin(self.span) {
+                            if let Some(current_padding) = sm.span_to_margin(self.token.span) {
                                 for (brace, brace_span) in &self.open_braces {
                                     if let Some(padding) = sm.span_to_margin(*brace_span) {
                                         // high likelihood of these two corresponding
@@ -159,7 +158,7 @@ impl<'a> TokenTreesReader<'a> {
                             self.unmatched_braces.push(UnmatchedBrace {
                                 expected_delim: tok,
                                 found_delim: other,
-                                found_span: self.span,
+                                found_span: self.token.span,
                                 unclosed_span: unclosed_delimiter,
                                 candidate_span: candidate,
                             });
@@ -198,12 +197,12 @@ impl<'a> TokenTreesReader<'a> {
                 let token_str = token_to_string(&self.token);
                 let msg = format!("unexpected close delimiter: `{}`", token_str);
                 let mut err = self.string_reader.sess.span_diagnostic
-                    .struct_span_err(self.span, &msg);
-                err.span_label(self.span, "unexpected close delimiter");
+                    .struct_span_err(self.token.span, &msg);
+                err.span_label(self.token.span, "unexpected close delimiter");
                 Err(err)
             },
             _ => {
-                let tt = TokenTree::Token(self.span, self.token.clone());
+                let tt = TokenTree::Token(self.token.take());
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
@@ -219,8 +218,6 @@ impl<'a> TokenTreesReader<'a> {
     }
 
     fn real_token(&mut self) {
-        let t = self.string_reader.real_token();
-        self.token = t.tok;
-        self.span = t.sp;
+        self.token = self.string_reader.real_token();
     }
 }
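
Two small helpers carry this hunk: `Token::dummy()` replaces the old paired `token::Eof`/`DUMMY_SP` initializers, and `self.token.take()` moves the current token into the tree without cloning. A plausible shape for both, sketched on stand-in types (the real definitions live in `libsyntax::parse::token` and may differ in detail):

    use std::mem;

    #[derive(Clone, Debug, PartialEq)]
    enum TokenKind { Whitespace, Comment }

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span(u32, u32);
    const DUMMY_SP: Span = Span(0, 0);

    #[derive(Clone, Debug, PartialEq)]
    struct Token { kind: TokenKind, span: Span }

    impl Token {
        fn new(kind: TokenKind, span: Span) -> Token {
            Token { kind, span }
        }

        // One placeholder value instead of the old `token::Eof` + `DUMMY_SP` pair.
        fn dummy() -> Token {
            Token::new(TokenKind::Whitespace, DUMMY_SP)
        }

        // Move the token out, leaving a dummy behind, so the caller can own
        // it without cloning (used by `TokenTree::Token(self.token.take())`).
        fn take(&mut self) -> Token {
            mem::replace(self, Token::dummy())
        }
    }

    fn main() {
        let mut current = Token::new(TokenKind::Comment, Span(0, 7));
        let owned = current.take();
        assert_eq!(owned.kind, TokenKind::Comment);
        assert_eq!(current, Token::dummy());
    }
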
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index 18019a89130..7d5356ffe4d 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -1,9 +1,9 @@
 //! Code related to parsing literals.
 
-use crate::ast::{self, Ident, Lit, LitKind};
+use crate::ast::{self, Lit, LitKind};
 use crate::parse::parser::Parser;
 use crate::parse::PResult;
-use crate::parse::token::{self, Token};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
 use crate::print::pprust;
 use crate::symbol::{kw, sym, Symbol};
@@ -228,10 +228,10 @@ impl Lit {
     }
 
     /// Converts arbitrary token into an AST literal.
-    crate fn from_token(token: &Token, span: Span) -> Result<Lit, LitError> {
-        let lit = match *token {
-            token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False =>
-                token::Lit::new(token::Bool, ident.name, None),
+    crate fn from_token(token: &Token) -> Result<Lit, LitError> {
+        let lit = match token.kind {
+            token::Ident(name, false) if name == kw::True || name == kw::False =>
+                token::Lit::new(token::Bool, name, None),
             token::Literal(lit) =>
                 lit,
             token::Interpolated(ref nt) => {
@@ -245,7 +245,7 @@ impl Lit {
             _ => return Err(LitError::NotLiteral)
         };
 
-        Lit::from_lit_token(lit, span)
+        Lit::from_lit_token(lit, token.span)
     }
 
     /// Attempts to recover an AST literal from semantic literal.
@@ -258,10 +258,10 @@ impl Lit {
     /// Losslessly convert an AST literal into a token stream.
     crate fn tokens(&self) -> TokenStream {
         let token = match self.token.kind {
-            token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
+            token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
-        TokenTree::Token(self.span, token).into()
+        TokenTree::token(token, self.span).into()
     }
 }
 
@@ -272,44 +272,43 @@ impl<'a> Parser<'a> {
         if self.token == token::Dot {
             // Attempt to recover `.4` as `0.4`.
             recovered = self.look_ahead(1, |t| {
-                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = *t {
+                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
+                        = t.kind {
                     let next_span = self.look_ahead_span(1);
                     if self.span.hi() == next_span.lo() {
                         let s = String::from("0.") + &symbol.as_str();
-                        let token = Token::lit(token::Float, Symbol::intern(&s), suffix);
-                        return Some((token, self.span.to(next_span)));
+                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
+                        return Some(Token::new(kind, self.span.to(next_span)));
                     }
                 }
                 None
             });
-            if let Some((ref token, span)) = recovered {
+            if let Some(token) = &recovered {
                 self.bump();
                 self.diagnostic()
-                    .struct_span_err(span, "float literals must have an integer part")
+                    .struct_span_err(token.span, "float literals must have an integer part")
                     .span_suggestion(
-                        span,
+                        token.span,
                         "must have an integer part",
-                        pprust::token_to_string(&token),
+                        pprust::token_to_string(token),
                         Applicability::MachineApplicable,
                     )
                     .emit();
             }
         }
 
-        let (token, span) = recovered.as_ref().map_or((&self.token, self.span),
-                                                      |(token, span)| (token, *span));
-
-        match Lit::from_token(token, span) {
+        let token = recovered.as_ref().unwrap_or(&self.token);
+        match Lit::from_token(token) {
             Ok(lit) => {
                 self.bump();
                 Ok(lit)
             }
             Err(LitError::NotLiteral) => {
                 let msg = format!("unexpected token: {}", self.this_token_descr());
-                Err(self.span_fatal(span, &msg))
+                Err(self.span_fatal(token.span, &msg))
             }
             Err(err) => {
-                let lit = token.expect_lit();
+                let (lit, span) = (token.expect_lit(), token.span);
                 self.bump();
                 err.report(&self.sess.span_diagnostic, lit, span);
                 let lit = token::Lit::new(token::Err, lit.symbol, lit.suffix);
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index f7a7aba9ecb..063823bbf4d 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -5,7 +5,8 @@ use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::feature_gate::UnstableFeatures;
 use crate::parse::parser::Parser;
-use crate::syntax::parse::parser::emit_unclosed_delims;
+use crate::parse::parser::emit_unclosed_delims;
+use crate::parse::token::TokenKind;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
 use crate::print::pprust::token_to_string;
@@ -239,7 +240,7 @@ fn maybe_source_file_to_parser(
     let mut parser = stream_to_parser(sess, stream, None);
     parser.unclosed_delims = unclosed_delims;
     if parser.token == token::Eof && parser.span.is_dummy() {
-        parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
+        parser.token.span = Span::new(end_pos, end_pos, parser.span.ctxt());
     }
 
     Ok(parser)
@@ -311,7 +312,7 @@ pub fn maybe_file_to_stream(
             for unmatched in unmatched_braces {
                 let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
                     "incorrect close delimiter: `{}`",
-                    token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+                    token_to_string(&token::CloseDelim(unmatched.found_delim)),
                 ));
                 db.span_label(unmatched.found_span, "incorrect close delimiter");
                 if let Some(sp) = unmatched.candidate_span {
@@ -358,13 +359,13 @@ pub fn stream_to_parser_with_base_dir<'a>(
 /// A sequence separator.
 pub struct SeqSep {
     /// The separator token.
-    pub sep: Option<token::Token>,
+    pub sep: Option<TokenKind>,
     /// `true` if a trailing separator is allowed.
     pub trailing_sep_allowed: bool,
 }
 
 impl SeqSep {
-    pub fn trailing_allowed(t: token::Token) -> SeqSep {
+    pub fn trailing_allowed(t: TokenKind) -> SeqSep {
         SeqSep {
             sep: Some(t),
             trailing_sep_allowed: true,
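
A separator is a kind of token with no position of its own, which is why `SeqSep` now stores a `TokenKind`. A hedged usage sketch with stand-in types (in the compiler this would be `SeqSep::trailing_allowed(token::Comma)`):

    #[derive(Clone, Debug, PartialEq)]
    enum TokenKind { Comma }

    // Mirrors the diff: the separator is a kind of token, with no span.
    struct SeqSep {
        sep: Option<TokenKind>,
        trailing_sep_allowed: bool,
    }

    impl SeqSep {
        fn trailing_allowed(t: TokenKind) -> SeqSep {
            SeqSep { sep: Some(t), trailing_sep_allowed: true }
        }
    }

    fn main() {
        // A comma-separated list that tolerates `a, b, c,`:
        let sep = SeqSep::trailing_allowed(TokenKind::Comma);
        assert_eq!(sep.sep, Some(TokenKind::Comma));
        assert!(sep.trailing_sep_allowed);
    }
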
@@ -382,10 +383,12 @@ impl SeqSep {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::ast::{self, Ident, PatKind};
+    use crate::ast::{self, Name, PatKind};
     use crate::attr::first_attr_value_str_by_name;
     use crate::ptr::P;
+    use crate::parse::token::Token;
     use crate::print::pprust::item_to_string;
+    use crate::symbol::{kw, sym};
     use crate::tokenstream::{DelimSpan, TokenTree};
     use crate::util::parser_testing::string_to_stream;
     use crate::util::parser_testing::{string_to_expr, string_to_item};
@@ -417,8 +420,6 @@ mod tests {
     #[test]
     fn string_to_tts_macro () {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let tts: Vec<_> =
                 string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
             let tts: &[TokenTree] = &tts[..];
@@ -426,19 +427,20 @@ mod tests {
             match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
                 (
                     4,
-                    Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
-                    Some(&TokenTree::Token(_, token::Not)),
-                    Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
+                    Some(&TokenTree::Token(Token {
+                        kind: token::Ident(name_macro_rules, false), ..
+                    })),
+                    Some(&TokenTree::Token(Token { kind: token::Not, .. })),
+                    Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                     Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
                 )
-                if name_macro_rules.name == sym::macro_rules
-                && name_zip.name.as_str() == "zip" => {
+                if name_macro_rules == sym::macro_rules && name_zip.as_str() == "zip" => {
                     let tts = &macro_tts.trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                         (
                             3,
                             Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
-                            Some(&TokenTree::Token(_, token::FatArrow)),
+                            Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })),
                             Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                         )
                         if macro_delim == token::Paren => {
@@ -446,20 +448,24 @@ mod tests {
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
-                                    Some(&TokenTree::Token(_, token::Dollar)),
-                                    Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                                    Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                                    Some(&TokenTree::Token(Token {
+                                        kind: token::Ident(name, false), ..
+                                    })),
                                 )
-                                if first_delim == token::Paren && ident.name.as_str() == "a" => {},
+                                if first_delim == token::Paren && name.as_str() == "a" => {},
                                 _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                             }
                             let tts = &second_tts.trees().collect::<Vec<_>>();
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
-                                    Some(&TokenTree::Token(_, token::Dollar)),
-                                    Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                                    Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                                    Some(&TokenTree::Token(Token {
+                                        kind: token::Ident(name, false), ..
+                                    })),
                                 )
-                                if second_delim == token::Paren && ident.name.as_str() == "a" => {},
+                                if second_delim == token::Paren && name.as_str() == "a" => {},
                                 _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                             }
                         },
@@ -477,26 +483,23 @@ mod tests {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
             let expected = TokenStream::new(vec![
-                TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
-                TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+                TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
                     TokenStream::new(vec![
-                        TokenTree::Token(sp(6, 7),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
-                        TokenTree::Token(sp(8, 9), token::Colon).into(),
-                        TokenTree::Token(sp(10, 13),
-                                         token::Ident(Ident::from_str("i32"), false)).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
+                        TokenTree::token(token::Colon, sp(8, 9)).into(),
+                        TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
                     ]).into(),
                 ).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
                     TokenStream::new(vec![
-                        TokenTree::Token(sp(17, 18),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
-                        TokenTree::Token(sp(18, 19), token::Semi).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
+                        TokenTree::token(token::Semi, sp(18, 19)).into(),
                     ]).into(),
                 ).into()
             ]);
@@ -603,8 +606,6 @@ mod tests {
 
     #[test] fn crlf_doc_comments() {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let sess = ParseSess::new(FilePathMapping::empty());
 
             let name_1 = FileName::Custom("crlf_source_1".to_string());
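
The tests above now build leaf nodes with `TokenTree::token(kind, span)` in place of the old two-field `TokenTree::Token(span, kind)` variant. The helper is presumably a thin constructor along these lines (stand-in types):

    #[derive(Clone, Debug, PartialEq)]
    enum TokenKind { Semi }

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Debug, PartialEq)]
    struct Token { kind: TokenKind, span: Span }

    impl Token {
        fn new(kind: TokenKind, span: Span) -> Token {
            Token { kind, span }
        }
    }

    #[derive(Clone, Debug, PartialEq)]
    enum TokenTree {
        // The variant now holds one self-contained, spanned token.
        Token(Token),
    }

    impl TokenTree {
        // Convenience constructor so call sites still pass kind + span.
        fn token(kind: TokenKind, span: Span) -> TokenTree {
            TokenTree::Token(Token::new(kind, span))
        }
    }

    fn main() {
        let sp = Span { lo: 18, hi: 19 };
        assert_eq!(
            TokenTree::token(TokenKind::Semi, sp),
            TokenTree::Token(Token::new(TokenKind::Semi, sp)),
        );
    }
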
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 790013f6eb1..43e7c9330e4 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -36,9 +36,9 @@ use crate::{ast, attr};
 use crate::ext::base::DummyResult;
 use crate::source_map::{self, SourceMap, Spanned, respan};
 use crate::parse::{SeqSep, classify, literal, token};
-use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
+use crate::parse::lexer::UnmatchedBrace;
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use crate::parse::token::DelimToken;
+use crate::parse::token::{Token, TokenKind, DelimToken};
 use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
 use crate::util::parser::{AssocOp, Fixity};
 use crate::print::pprust;
@@ -57,6 +57,7 @@ use log::debug;
 use std::borrow::Cow;
 use std::cmp;
 use std::mem;
+use std::ops::Deref;
 use std::path::{self, Path, PathBuf};
 use std::slice;
 
@@ -121,7 +122,7 @@ crate enum BlockMode {
 /// `token::Interpolated` tokens.
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
-        if let token::Interpolated(nt) = &$p.token {
+        if let token::Interpolated(nt) = &$p.token.kind {
             match &**nt {
                 token::NtExpr(e) | token::NtLiteral(e) => {
                     let e = e.clone();
@@ -147,7 +148,7 @@ macro_rules! maybe_whole_expr {
 /// As maybe_whole_expr, but for things other than expressions
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
-        if let token::Interpolated(nt) = &$p.token {
+        if let token::Interpolated(nt) = &$p.token.kind {
             if let token::$constructor(x) = &**nt {
                 let $x = x.clone();
                 $p.bump();
@@ -161,7 +162,7 @@ macro_rules! maybe_whole {
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
     ($self: expr, $allow_qpath_recovery: expr) => {
         if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
-            if let token::Interpolated(nt) = &$self.token {
+            if let token::Interpolated(nt) = &$self.token.kind {
                 if let token::NtTy(ty) = &**nt {
                     let ty = ty.clone();
                     $self.bump();
@@ -196,14 +197,17 @@ enum PrevTokenKind {
 #[derive(Clone)]
 pub struct Parser<'a> {
     pub sess: &'a ParseSess,
-    /// The current token.
-    pub token: token::Token,
-    /// The span of the current token.
-    pub span: Span,
+    /// The current normalized token.
+    /// "Normalized" means that some interpolated tokens
+    /// (`$i: ident` and `$l: lifetime` meta-variables) are replaced
+    /// with non-interpolated identifier and lifetime tokens they refer to.
+    /// Perhaps the normalized / non-normalized setup can be simplified somehow.
+    pub token: Token,
+    /// Span of the current non-normalized token.
     meta_var_span: Option<Span>,
-    /// The span of the previous token.
+    /// Span of the previous non-normalized token.
     pub prev_span: Span,
-    /// The kind of the previous troken.
+    /// Kind of the previous normalized token (in simplified form).
     prev_token_kind: PrevTokenKind,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.
@@ -242,6 +246,15 @@ impl<'a> Drop for Parser<'a> {
     }
 }
 
+// FIXME: Parser uses `self.span` all the time.
+// Remove this impl if you think that using `self.token.span` instead is acceptable.
+impl Deref for Parser<'_> {
+    type Target = Token;
+    fn deref(&self) -> &Self::Target {
+        &self.token
+    }
+}
+
 #[derive(Clone)]
 crate struct TokenCursor {
     crate frame: TokenCursorFrame,
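
The `Deref` impl above is what keeps the pervasive `self.span` call sites compiling after the span moves into the token: lookups that fail on `Parser` fall through to the current `Token`. A standalone illustration of that auto-deref (toy types, same shape as the diff):

    use std::ops::Deref;

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: u32, hi: u32 }

    struct Token { span: Span }

    struct Parser { token: Token }

    // As in the diff: failed field/method lookups on `Parser` fall through
    // to the current `Token` via deref coercion.
    impl Deref for Parser {
        type Target = Token;
        fn deref(&self) -> &Token {
            &self.token
        }
    }

    fn main() {
        let parser = Parser { token: Token { span: Span { lo: 3, hi: 4 } } };
        // Both spellings name the same field, so old call sites keep working.
        assert_eq!(parser.span, parser.token.span);
    }
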
@@ -295,7 +308,7 @@ impl TokenCursorFrame {
 }
 
 impl TokenCursor {
-    fn next(&mut self) -> TokenAndSpan {
+    fn next(&mut self) -> Token {
         loop {
             let tree = if !self.frame.open_delim {
                 self.frame.open_delim = true;
@@ -309,7 +322,7 @@ impl TokenCursor {
                 self.frame = frame;
                 continue
             } else {
-                return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP }
+                return Token::new(token::Eof, DUMMY_SP);
             };
 
             match self.frame.last_token {
@@ -318,7 +331,7 @@ impl TokenCursor {
             }
 
             match tree {
-                TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
+                TokenTree::Token(token) => return token,
                 TokenTree::Delimited(sp, delim, tts) => {
                     let frame = TokenCursorFrame::new(sp, delim, &tts);
                     self.stack.push(mem::replace(&mut self.frame, frame));
@@ -327,9 +340,9 @@ impl TokenCursor {
         }
     }
 
-    fn next_desugared(&mut self) -> TokenAndSpan {
-        let (sp, name) = match self.next() {
-            TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
+    fn next_desugared(&mut self) -> Token {
+        let (name, sp) = match self.next() {
+            Token { kind: token::DocComment(name), span } => (name, span),
             tok => return tok,
         };
 
@@ -353,11 +366,11 @@ impl TokenCursor {
             delim_span,
             token::Bracket,
             [
-                TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
-                TokenTree::Token(sp, token::Eq),
-                TokenTree::Token(sp, token::Token::lit(
+                TokenTree::token(token::Ident(sym::doc, false), sp),
+                TokenTree::token(token::Eq, sp),
+                TokenTree::token(TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
-                )),
+                ), sp),
             ]
             .iter().cloned().collect::<TokenStream>().into(),
         );
@@ -366,10 +379,10 @@ impl TokenCursor {
             delim_span,
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+                [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::Token(sp, token::Pound), body]
+                [TokenTree::token(token::Pound, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
         )));
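
`next_desugared` rewrites a doc comment into the token stream of the equivalent `doc` attribute (the raw-string hash count, `num_of_hashes`, is computed from the comment text to avoid clashes). Illustratively, in ordinary Rust:

    /// An outer doc comment like this one...
    struct S;

    // ...is desugared by the parser into the same token stream as:
    #[doc = r"An outer doc comment like this one..."]
    struct T;

    // Inner doc comments (`//!`) instead desugar to inner attribute syntax,
    // `#![doc = r"..."]`: hence the extra `Not` (`!`) token inserted after
    // `Pound` (`#`) in the `AttrStyle::Inner` branch above.
    fn main() {
        let _ = (S, T); // silence dead-code warnings
    }
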
@@ -380,7 +393,7 @@ impl TokenCursor {
 
 #[derive(Clone, PartialEq)]
 crate enum TokenType {
-    Token(token::Token),
+    Token(TokenKind),
     Keyword(Symbol),
     Operator,
     Lifetime,
@@ -410,7 +423,7 @@ impl TokenType {
 ///
 /// Types can also be of the form `IDENT(u8, u8) -> u8`; however, this assumes
 /// that `IDENT` is not the ident of a fn trait.
-fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
+fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool {
     t == &token::ModSep || t == &token::Lt ||
     t == &token::BinOp(token::Shl)
 }
@@ -468,8 +481,7 @@ impl<'a> Parser<'a> {
     ) -> Self {
         let mut parser = Parser {
             sess,
-            token: token::Whitespace,
-            span: DUMMY_SP,
+            token: Token::dummy(),
             prev_span: DUMMY_SP,
             meta_var_span: None,
             prev_token_kind: PrevTokenKind::Other,
@@ -498,9 +510,7 @@ impl<'a> Parser<'a> {
             subparser_name,
         };
 
-        let tok = parser.next_tok();
-        parser.token = tok.tok;
-        parser.span = tok.sp;
+        parser.token = parser.next_tok();
 
         if let Some(directory) = directory {
             parser.directory = directory;
@@ -515,15 +525,15 @@ impl<'a> Parser<'a> {
         parser
     }
 
-    fn next_tok(&mut self) -> TokenAndSpan {
+    fn next_tok(&mut self) -> Token {
         let mut next = if self.desugar_doc_comments {
             self.token_cursor.next_desugared()
         } else {
             self.token_cursor.next()
         };
-        if next.sp.is_dummy() {
+        if next.span.is_dummy() {
             // Tweak the location for better diagnostics, but keep syntactic context intact.
-            next.sp = self.prev_span.with_ctxt(next.sp.ctxt());
+            next.span = self.prev_span.with_ctxt(next.span.ctxt());
         }
         next
     }
@@ -534,10 +544,10 @@ impl<'a> Parser<'a> {
     }
 
     crate fn token_descr(&self) -> Option<&'static str> {
-        Some(match &self.token {
-            t if t.is_special_ident() => "reserved identifier",
-            t if t.is_used_keyword() => "keyword",
-            t if t.is_unused_keyword() => "reserved keyword",
+        Some(match &self.token.kind {
+            _ if self.token.is_special_ident() => "reserved identifier",
+            _ if self.token.is_used_keyword() => "keyword",
+            _ if self.token.is_unused_keyword() => "reserved keyword",
             token::DocComment(..) => "doc comment",
             _ => return None,
         })
@@ -559,7 +569,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
+    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
@@ -577,8 +587,8 @@ impl<'a> Parser<'a> {
     /// anything. Signal a fatal error if the next token is unexpected.
     pub fn expect_one_of(
         &mut self,
-        edible: &[token::Token],
-        inedible: &[token::Token],
+        edible: &[TokenKind],
+        inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
         if edible.contains(&self.token) {
             self.bump();
@@ -612,8 +622,8 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
-        match self.token {
-            token::Ident(ident, _) => {
+        match self.token.kind {
+            token::Ident(name, _) => {
                 if self.token.is_reserved_ident() {
                     let mut err = self.expected_ident_found();
                     if recover {
@@ -624,7 +634,7 @@ impl<'a> Parser<'a> {
                 }
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => {
                 Err(if self.prev_token_kind == PrevTokenKind::DocComment {
@@ -640,14 +650,14 @@ impl<'a> Parser<'a> {
     ///
     /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
     /// encountered.
-    crate fn check(&mut self, tok: &token::Token) -> bool {
+    crate fn check(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.token == *tok;
         if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
         is_present
     }
 
     /// Consumes a token `tok` if it exists. Returns whether the given token was present.
-    pub fn eat(&mut self, tok: &token::Token) -> bool {
+    pub fn eat(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.check(tok);
         if is_present { self.bump() }
         is_present
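
`check` and `eat` now take `&TokenKind`: callers name the kind they expect, and the spanned `self.token` is compared against it by kind. A toy model of the pair (stand-in types; the real methods also feed `expected_tokens` into diagnostics):

    #[derive(Clone, Debug, PartialEq)]
    enum TokenKind { Comma, Semi, Eof }

    struct Parser {
        tokens: Vec<TokenKind>,
        pos: usize,
        expected_tokens: Vec<TokenKind>,
    }

    impl Parser {
        fn token(&self) -> &TokenKind {
            self.tokens.get(self.pos).unwrap_or(&TokenKind::Eof)
        }

        // Record `tok` as expected (for diagnostics) and say whether it is next.
        fn check(&mut self, tok: &TokenKind) -> bool {
            let is_present = self.token() == tok;
            if !is_present {
                self.expected_tokens.push(tok.clone());
            }
            is_present
        }

        // Consume `tok` if it is next; report whether it was present.
        fn eat(&mut self, tok: &TokenKind) -> bool {
            let is_present = self.check(tok);
            if is_present {
                self.pos += 1;
            }
            is_present
        }
    }

    fn main() {
        let mut p = Parser { tokens: vec![TokenKind::Comma], pos: 0, expected_tokens: vec![] };
        assert!(p.eat(&TokenKind::Comma));
        assert!(!p.eat(&TokenKind::Semi)); // not consumed, but recorded as expected
        assert_eq!(p.expected_tokens, vec![TokenKind::Semi]);
    }
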
@@ -732,7 +742,7 @@ impl<'a> Parser<'a> {
     /// See issue #47856 for an example of when this may occur.
     fn eat_plus(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
-        match self.token {
+        match self.token.kind {
             token::BinOp(token::Plus) => {
                 self.bump();
                 true
@@ -763,7 +773,7 @@ impl<'a> Parser<'a> {
     /// `&` and continues. If an `&` is not seen, signals an error.
     fn expect_and(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
-        match self.token {
+        match self.token.kind {
             token::BinOp(token::And) => {
                 self.bump();
                 Ok(())
@@ -780,7 +790,7 @@ impl<'a> Parser<'a> {
     /// `|` and continues. If an `|` is not seen, signals an error.
     fn expect_or(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
-        match self.token {
+        match self.token.kind {
             token::BinOp(token::Or) => {
                 self.bump();
                 Ok(())
@@ -805,7 +815,7 @@ impl<'a> Parser<'a> {
     /// starting token.
     fn eat_lt(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Token(token::Lt));
-        let ate = match self.token {
+        let ate = match self.token.kind {
             token::Lt => {
                 self.bump();
                 true
@@ -845,7 +855,7 @@ impl<'a> Parser<'a> {
     /// with a single `>` and continues. If a `>` is not seen, signals an error.
     fn expect_gt(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::Gt));
-        let ate = match self.token {
+        let ate = match self.token.kind {
             token::Gt => {
                 self.bump();
                 Some(())
@@ -883,7 +893,7 @@ impl<'a> Parser<'a> {
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     pub fn parse_seq_to_end<T, F>(&mut self,
-                                  ket: &token::Token,
+                                  ket: &TokenKind,
                                   sep: SeqSep,
                                   f: F)
                                   -> PResult<'a, Vec<T>> where
@@ -901,7 +911,7 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     pub fn parse_seq_to_before_end<T, F>(
         &mut self,
-        ket: &token::Token,
+        ket: &TokenKind,
         sep: SeqSep,
         f: F,
     ) -> PResult<'a, (Vec<T>, bool)>
@@ -912,7 +922,7 @@ impl<'a> Parser<'a> {
 
     crate fn parse_seq_to_before_tokens<T, F>(
         &mut self,
-        kets: &[&token::Token],
+        kets: &[&TokenKind],
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: F,
@@ -928,7 +938,7 @@ impl<'a> Parser<'a> {
                     TokenExpectType::NoExpect => self.token == **k,
                 }
             }) {
-            match self.token {
+            match self.token.kind {
                 token::CloseDelim(..) | token::Eof => break,
                 _ => {}
             };
@@ -986,8 +996,8 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_unspanned_seq<T, F>(
         &mut self,
-        bra: &token::Token,
-        ket: &token::Token,
+        bra: &TokenKind,
+        ket: &TokenKind,
         sep: SeqSep,
         f: F,
     ) -> PResult<'a, Vec<T>> where
@@ -1011,7 +1021,7 @@ impl<'a> Parser<'a> {
         self.prev_span = self.meta_var_span.take().unwrap_or(self.span);
 
         // Record last token kind for possible error recovery.
-        self.prev_token_kind = match self.token {
+        self.prev_token_kind = match self.token.kind {
             token::DocComment(..) => PrevTokenKind::DocComment,
             token::Comma => PrevTokenKind::Comma,
             token::BinOp(token::Plus) => PrevTokenKind::Plus,
@@ -1022,9 +1032,7 @@ impl<'a> Parser<'a> {
             _ => PrevTokenKind::Other,
         };
 
-        let next = self.next_tok();
-        self.span = next.sp;
-        self.token = next.tok;
+        self.token = self.next_tok();
         self.expected_tokens.clear();
         // check after each token
         self.process_potential_macro_variable();
@@ -1032,30 +1040,31 @@ impl<'a> Parser<'a> {
 
     /// Advances the parser using the provided token as the next one. Use this
     /// when consuming a part of a token, for example a single `<` from `<<`.
-    fn bump_with(&mut self, next: token::Token, span: Span) {
+    fn bump_with(&mut self, next: TokenKind, span: Span) {
         self.prev_span = self.span.with_hi(span.lo());
         // It would be incorrect to record the kind of the current token, but
         // fortunately for tokens currently using `bump_with`, the
         // prev_token_kind will be of no use anyway.
         self.prev_token_kind = PrevTokenKind::Other;
-        self.span = span;
-        self.token = next;
+        self.token = Token::new(next, span);
         self.expected_tokens.clear();
     }
 
     pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
-        F: FnOnce(&token::Token) -> R,
+        F: FnOnce(&Token) -> R,
     {
         if dist == 0 {
-            return f(&self.token)
+            return f(&self.token);
         }
 
-        f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+        let frame = &self.token_cursor.frame;
+        f(&match frame.tree_cursor.look_ahead(dist - 1) {
             Some(tree) => match tree {
-                TokenTree::Token(_, tok) => tok,
-                TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
-            },
-            None => token::CloseDelim(self.token_cursor.frame.delim),
+                TokenTree::Token(token) => token,
+                TokenTree::Delimited(dspan, delim, _) =>
+                    Token::new(token::OpenDelim(delim), dspan.open),
+            }
+            None => Token::new(token::CloseDelim(frame.delim), frame.span.close)
         })
     }
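
`look_ahead` closures now receive a full `&Token`, so when the cursor has nothing concrete to show, a token is synthesized on the spot with a meaningful span: the opening span of a nested delimited tree (`dspan.open`) or the closing span of the current frame (`frame.span.close`), rather than a bare kind as before. A toy model of that synthesis:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Delim { Paren }

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum TokenKind { OpenDelim(Delim), CloseDelim(Delim) }

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Token { kind: TokenKind, span: Span }

    impl Token {
        fn new(kind: TokenKind, span: Span) -> Token {
            Token { kind, span }
        }
    }

    // Stand-in for a token-tree cursor frame: the delimiter kind plus the
    // spans of its opening and closing tokens.
    struct Frame { delim: Delim, open: Span, close: Span }

    // Lookahead hits a nested delimited tree: surface its `(` as a real
    // token spanned at the tree's opening delimiter (the diff's `dspan.open`).
    fn at_delimited(frame: &Frame) -> Token {
        Token::new(TokenKind::OpenDelim(frame.delim), frame.open)
    }

    // Lookahead runs past the end of the current frame: synthesize the `)`
    // with the frame's closing span (the diff's `frame.span.close`).
    fn past_the_end(frame: &Frame) -> Token {
        Token::new(TokenKind::CloseDelim(frame.delim), frame.close)
    }

    fn main() {
        let frame = Frame {
            delim: Delim::Paren,
            open: Span { lo: 5, hi: 6 },
            close: Span { lo: 13, hi: 14 },
        };
        assert_eq!(at_delimited(&frame).span, Span { lo: 5, hi: 6 });
        assert_eq!(past_the_end(&frame).span, Span { lo: 13, hi: 14 });
    }
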
 
@@ -1065,7 +1074,7 @@ impl<'a> Parser<'a> {
         }
 
         match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
-            Some(TokenTree::Token(span, _)) => span,
+            Some(TokenTree::Token(token)) => token.span,
             Some(TokenTree::Delimited(span, ..)) => span.entire(),
             None => self.look_ahead_span(dist - 1),
         }
@@ -1209,7 +1218,7 @@ impl<'a> Parser<'a> {
                 decl,
             };
 
-            let body = match self.token {
+            let body = match self.token.kind {
                 token::Semi => {
                     self.bump();
                     *at_end = true;
@@ -1477,7 +1486,7 @@ impl<'a> Parser<'a> {
     }
 
     fn is_named_argument(&self) -> bool {
-        let offset = match self.token {
+        let offset = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
@@ -1612,22 +1621,22 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
-        match self.token {
-            token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
+        match self.token.kind {
+            token::Ident(name, _) if name.is_path_segment_keyword() => {
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }
     }
 
     fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
-        match self.token {
-            token::Ident(ident, false) if ident.name == kw::Underscore => {
+        match self.token.kind {
+            token::Ident(name, false) if name == kw::Underscore => {
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }
@@ -1710,7 +1719,7 @@ impl<'a> Parser<'a> {
     /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
     /// attributes.
     pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
-        let meta_ident = match self.token {
+        let meta_ident = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref meta) => match meta.node {
                     ast::MetaItemKind::Word => Some(meta.path.clone()),
@@ -1763,7 +1772,7 @@ impl<'a> Parser<'a> {
     fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
         let ident = self.parse_path_segment_ident()?;
 
-        let is_args_start = |token: &token::Token| match *token {
+        let is_args_start = |token: &TokenKind| match *token {
             token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
             | token::LArrow => true,
             _ => false,
@@ -1859,7 +1868,8 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_field_name(&mut self) -> PResult<'a, Ident> {
-        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token {
+        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
+                self.token.kind {
             self.expect_no_suffix(self.span, "a tuple index", suffix);
             self.bump();
             Ok(Ident::new(symbol, self.prev_span))
@@ -1949,7 +1959,7 @@ impl<'a> Parser<'a> {
     }
 
     fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
-        let delim = match self.token {
+        let delim = match self.token.kind {
             token::OpenDelim(delim) => delim,
             _ => {
                 let msg = "expected open delimiter";
@@ -1992,8 +2002,8 @@ impl<'a> Parser<'a> {
 
         let ex: ExprKind;
 
-        // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
-        match self.token {
+        // Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr().
+        match self.token.kind {
             token::OpenDelim(token::Paren) => {
                 self.bump();
 
@@ -2363,13 +2373,11 @@ impl<'a> Parser<'a> {
             }
 
             let mut recovery_field = None;
-            if let token::Ident(ident, _) = self.token {
+            if let token::Ident(name, _) = self.token.kind {
                 if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
                     // Use in case of error after field-looking code: `S { foo: () with a }`
-                    let mut ident = ident.clone();
-                    ident.span = self.span;
                     recovery_field = Some(ast::Field {
-                        ident,
+                        ident: Ident::new(name, self.span),
                         span: self.span,
                         expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
                         is_shorthand: false,
@@ -2503,7 +2511,7 @@ impl<'a> Parser<'a> {
         let segment = self.parse_path_segment(PathStyle::Expr)?;
         self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren));
 
-        Ok(match self.token {
+        Ok(match self.token.kind {
             token::OpenDelim(token::Paren) => {
                 // Method call `expr.f()`
                 let mut args = self.parse_unspanned_seq(
@@ -2542,7 +2550,7 @@ impl<'a> Parser<'a> {
 
             // expr.f
             if self.eat(&token::Dot) {
-                match self.token {
+                match self.token.kind {
                     token::Ident(..) => {
                         e = self.parse_dot_suffix(e, lo)?;
                     }
@@ -2594,7 +2602,7 @@ impl<'a> Parser<'a> {
                 continue;
             }
             if self.expr_is_complete(&e) { break; }
-            match self.token {
+            match self.token.kind {
                 // expr(...)
                 token::OpenDelim(token::Paren) => {
                     let seq = self.parse_unspanned_seq(
@@ -2627,12 +2635,12 @@ impl<'a> Parser<'a> {
     }
 
     crate fn process_potential_macro_variable(&mut self) {
-        let (token, span) = match self.token {
+        self.token = match self.token.kind {
             token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
                              self.look_ahead(1, |t| t.is_ident()) => {
                 self.bump();
-                let name = match self.token {
-                    token::Ident(ident, _) => ident,
+                let name = match self.token.kind {
+                    token::Ident(name, _) => name,
                     _ => unreachable!()
                 };
                 let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
@@ -2646,24 +2654,24 @@ impl<'a> Parser<'a> {
                 // Interpolated identifier and lifetime tokens are replaced with usual identifier
                 // and lifetime tokens, so the former are never encountered during normal parsing.
                 match **nt {
-                    token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
-                    token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
+                    token::NtIdent(ident, is_raw) =>
+                        Token::new(token::Ident(ident.name, is_raw), ident.span),
+                    token::NtLifetime(ident) =>
+                        Token::new(token::Lifetime(ident.name), ident.span),
                     _ => return,
                 }
             }
             _ => return,
         };
-        self.token = token;
-        self.span = span;
     }
 
     /// Parses a single token tree from the input.
     crate fn parse_token_tree(&mut self) -> TokenTree {
-        match self.token {
+        match self.token.kind {
             token::OpenDelim(..) => {
                 let frame = mem::replace(&mut self.token_cursor.frame,
                                          self.token_cursor.stack.pop().unwrap());
-                self.span = frame.span.entire();
+                self.token.span = frame.span.entire();
                 self.bump();
                 TokenTree::Delimited(
                     frame.span,
@@ -2673,9 +2681,9 @@ impl<'a> Parser<'a> {
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
-                let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
+                let token = self.token.take();
                 self.bump();
-                TokenTree::Token(span, token)
+                TokenTree::Token(token)
             }
         }
     }
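
This is the "normalization" that the new `Parser::token` doc comment describes: an interpolated `NtIdent`/`NtLifetime` produced by macro expansion is replaced with an ordinary identifier or lifetime token, keeping the identifier's own span rather than the metavariable's. A reduced model (stand-in types):

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: u32, hi: u32 }

    type Name = &'static str;

    #[derive(Clone, Debug, PartialEq)]
    struct Ident { name: Name, span: Span }

    #[derive(Clone, Debug, PartialEq)]
    enum Nonterminal { NtIdent(Ident, /* is_raw */ bool), NtLifetime(Ident) }

    #[derive(Clone, Debug, PartialEq)]
    enum TokenKind {
        Ident(Name, bool),
        Lifetime(Name),
        Interpolated(Box<Nonterminal>),
    }

    #[derive(Clone, Debug, PartialEq)]
    struct Token { kind: TokenKind, span: Span }

    impl Token {
        fn new(kind: TokenKind, span: Span) -> Token {
            Token { kind, span }
        }
    }

    // Replace `$i:ident` / `$l:lifetime` interpolations with ordinary tokens,
    // carrying over the interpolated identifier's span (as in the diff above).
    fn normalize(token: Token) -> Token {
        match token.kind {
            TokenKind::Interpolated(nt) => match *nt {
                Nonterminal::NtIdent(ident, is_raw) =>
                    Token::new(TokenKind::Ident(ident.name, is_raw), ident.span),
                Nonterminal::NtLifetime(ident) =>
                    Token::new(TokenKind::Lifetime(ident.name), ident.span),
            },
            _ => token,
        }
    }

    fn main() {
        let ident = Ident { name: "zip", span: Span { lo: 20, hi: 23 } };
        let meta_var_span = Span { lo: 0, hi: 2 }; // where `$i` itself sits
        let interp = Token::new(
            TokenKind::Interpolated(Box::new(Nonterminal::NtIdent(ident.clone(), false))),
            meta_var_span,
        );
        assert_eq!(normalize(interp), Token::new(TokenKind::Ident("zip", false), ident.span));

        let lt = Ident { name: "'a", span: Span { lo: 30, hi: 32 } };
        let interp_lt = Token::new(
            TokenKind::Interpolated(Box::new(Nonterminal::NtLifetime(lt.clone()))),
            meta_var_span,
        );
        assert_eq!(normalize(interp_lt), Token::new(TokenKind::Lifetime("'a"), lt.span));
    }
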
@@ -2692,7 +2700,7 @@ impl<'a> Parser<'a> {
     pub fn parse_tokens(&mut self) -> TokenStream {
         let mut result = Vec::new();
         loop {
-            match self.token {
+            match self.token.kind {
                 token::Eof | token::CloseDelim(..) => break,
                 _ => result.push(self.parse_token_tree().into()),
             }
@@ -2706,8 +2714,8 @@ impl<'a> Parser<'a> {
                              -> PResult<'a, P<Expr>> {
         let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
         let lo = self.span;
-        // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr()
-        let (hi, ex) = match self.token {
+        // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
+        let (hi, ex) = match self.token.kind {
             token::Not => {
                 self.bump();
                 let e = self.parse_prefix_expr(None);
@@ -2760,10 +2768,10 @@ impl<'a> Parser<'a> {
                 // `not` is just an ordinary identifier in Rust-the-language,
                 // but as `rustc`-the-compiler, we can issue clever diagnostics
                 // for confused users who really want to say `!`
-                let token_cannot_continue_expr = |t: &token::Token| match *t {
+                let token_cannot_continue_expr = |t: &Token| match t.kind {
                     // These tokens can start an expression after `!`, but
                     // can't continue an expression after an ident
-                    token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
+                    token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
                     token::Literal(..) | token::Pound => true,
                     token::Interpolated(ref nt) => match **nt {
                         token::NtIdent(..) | token::NtExpr(..) |
@@ -3040,7 +3048,7 @@ impl<'a> Parser<'a> {
 
                 match self.parse_path(PathStyle::Expr) {
                     Ok(path) => {
-                        let (op_noun, op_verb) = match self.token {
+                        let (op_noun, op_verb) = match self.token.kind {
                             token::Lt => ("comparison", "comparing"),
                             token::BinOp(token::Shl) => ("shift", "shifting"),
                             _ => {
@@ -3359,7 +3367,7 @@ impl<'a> Parser<'a> {
         let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
                                                None)?;
         if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
-            if self.token == token::Token::Semi {
+            if self.token == token::Semi {
                 e.span_suggestion_short(
                     match_span,
                     "try removing this `match`",
@@ -3844,14 +3852,14 @@ impl<'a> Parser<'a> {
     // helper function to decide whether to parse as ident binding or to try to do
     // something more complex like range patterns
     fn parse_as_ident(&mut self) -> bool {
-        self.look_ahead(1, |t| match *t {
+        self.look_ahead(1, |t| match t.kind {
             token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
             token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false),
             // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the
             // range pattern branch
             token::DotDot => None,
             _ => Some(true),
-        }).unwrap_or_else(|| self.look_ahead(2, |t| match *t {
+        }).unwrap_or_else(|| self.look_ahead(2, |t| match t.kind {
             token::Comma | token::CloseDelim(token::Bracket) => true,
             _ => false,
         }))
@@ -3914,14 +3922,13 @@ impl<'a> Parser<'a> {
 
         let lo = self.span;
         let pat;
-        match self.token {
+        match self.token.kind {
             token::BinOp(token::And) | token::AndAnd => {
                 // Parse &pat / &mut pat
                 self.expect_and()?;
                 let mutbl = self.parse_mutability();
-                if let token::Lifetime(ident) = self.token {
-                    let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern",
-                                                      ident));
+                if let token::Lifetime(name) = self.token.kind {
+                    let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name));
                     err.span_label(self.span, "unexpected lifetime");
                     return Err(err);
                 }
@@ -3990,7 +3997,7 @@ impl<'a> Parser<'a> {
                     // Parse an unqualified path
                     (None, self.parse_path(PathStyle::Expr)?)
                 };
-                match self.token {
+                match self.token.kind {
                     token::Not if qself.is_none() => {
                         // Parse macro invocation
                         self.bump();
@@ -3999,7 +4006,7 @@ impl<'a> Parser<'a> {
                         pat = PatKind::Mac(mac);
                     }
                     token::DotDotDot | token::DotDotEq | token::DotDot => {
-                        let end_kind = match self.token {
+                        let end_kind = match self.token.kind {
                             token::DotDot => RangeEnd::Excluded,
                             token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot),
                             token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq),
@@ -4325,8 +4332,8 @@ impl<'a> Parser<'a> {
     fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
                      -> PResult<'a, Option<P<Item>>> {
         let token_lo = self.span;
-        let (ident, def) = match self.token {
-            token::Ident(ident, false) if ident.name == kw::Macro => {
+        let (ident, def) = match self.token.kind {
+            token::Ident(name, false) if name == kw::Macro => {
                 self.bump();
                 let ident = self.parse_ident()?;
                 let tokens = if self.check(&token::OpenDelim(token::Brace)) {
@@ -4344,7 +4351,7 @@ impl<'a> Parser<'a> {
                     };
                     TokenStream::new(vec![
                         args.into(),
-                        TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+                        TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
                         body.into(),
                     ])
                 } else {
@@ -4354,8 +4361,8 @@ impl<'a> Parser<'a> {
 
                 (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
             }
-            token::Ident(ident, _) if ident.name == sym::macro_rules &&
-                                   self.look_ahead(1, |t| *t == token::Not) => {
+            token::Ident(name, _) if name == sym::macro_rules &&
+                                     self.look_ahead(1, |t| *t == token::Not) => {
                 let prev_span = self.prev_span;
                 self.complain_if_pub_macro(&vis.node, prev_span);
                 self.bump();
@@ -4436,7 +4443,7 @@ impl<'a> Parser<'a> {
             }
 
             // it's a macro invocation
-            let id = match self.token {
+            let id = match self.token.kind {
                 token::OpenDelim(_) => Ident::invalid(), // no special identifier
                 _ => self.parse_ident()?,
             };
@@ -4444,7 +4451,7 @@ impl<'a> Parser<'a> {
             // check that we're pointing at delimiters (need to check
             // again after the `if`, because of `parse_ident`
             // consuming more tokens).
-            match self.token {
+            match self.token.kind {
                 token::OpenDelim(_) => {}
                 _ => {
                     // we only expect an ident if we didn't parse one
@@ -4481,7 +4488,9 @@ impl<'a> Parser<'a> {
                 // We used to incorrectly stop parsing macro-expanded statements here.
                 // If the next token will be an error anyway but could have parsed with the
                 // earlier behavior, stop parsing here and emit a warning to avoid breakage.
-                else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
+                else if macro_legacy_warnings &&
+                        self.token.can_begin_expr() &&
+                        match self.token.kind {
                     // These can continue an expression, so we can't stop parsing and warn.
                     token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
                     token::BinOp(token::Minus) | token::BinOp(token::Star) |
@@ -4779,7 +4788,7 @@ impl<'a> Parser<'a> {
         let mut last_plus_span = None;
         let mut was_negative = false;
         loop {
-            // This needs to be synchronized with `Token::can_begin_bound`.
+            // This needs to be synchronized with `TokenKind::can_begin_bound`.
             let is_bound_start = self.check_path() || self.check_lifetime() ||
                                  self.check(&token::Not) || // used for error reporting only
                                  self.check(&token::Question) ||
@@ -5250,7 +5259,7 @@ impl<'a> Parser<'a> {
                 assoc_ty_constraints.push(span);
             } else if self.check_const_arg() {
                 // Parse const argument.
-                let expr = if let token::OpenDelim(token::Brace) = self.token {
+                let expr = if let token::OpenDelim(token::Brace) = self.token.kind {
                     self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())?
                 } else if self.token.is_ident() {
                     // FIXME(const_generics): to distinguish between idents for types and consts,
@@ -5477,10 +5486,10 @@ impl<'a> Parser<'a> {
 
     /// Returns the parsed optional self argument and whether a self shortcut was used.
     fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
-        let expect_ident = |this: &mut Self| match this.token {
+        let expect_ident = |this: &mut Self| match this.token.kind {
             // Preserve hygienic context.
-            token::Ident(ident, _) =>
-                { let span = this.span; this.bump(); Ident::new(ident.name, span) }
+            token::Ident(name, _) =>
+                { let span = this.span; this.bump(); Ident::new(name, span) }
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {
@@ -5492,7 +5501,7 @@ impl<'a> Parser<'a> {
         // Only a limited set of initial token sequences is considered `self` parameters; anything
         // else is parsed as a normal function parameter list, so some lookahead is required.
         let eself_lo = self.span;
-        let (eself, eself_ident, eself_hi) = match self.token {
+        let (eself, eself_ident, eself_hi) = match self.token.kind {
             token::BinOp(token::And) => {
                 // `&self`
                 // `&mut self`
@@ -5803,11 +5812,7 @@ impl<'a> Parser<'a> {
         match *vis {
             VisibilityKind::Inherited => {}
             _ => {
-                let is_macro_rules: bool = match self.token {
-                    token::Ident(sid, _) => sid.name == sym::macro_rules,
-                    _ => false,
-                };
-                let mut err = if is_macro_rules {
+                let mut err = if self.token.is_keyword(sym::macro_rules) {
                     let mut err = self.diagnostic()
                         .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
                     err.span_suggestion(
@@ -5918,9 +5923,9 @@ impl<'a> Parser<'a> {
             self.expect(&token::OpenDelim(token::Brace))?;
             let mut trait_items = vec![];
             while !self.eat(&token::CloseDelim(token::Brace)) {
-                if let token::DocComment(_) = self.token {
+                if let token::DocComment(_) = self.token.kind {
                     if self.look_ahead(1,
-                    |tok| tok == &token::Token::CloseDelim(token::Brace)) {
+                    |tok| tok == &token::CloseDelim(token::Brace)) {
                         let mut err = self.diagnostic().struct_span_err_with_code(
                             self.span,
                             "found a documentation comment that doesn't document anything",
@@ -6246,7 +6251,7 @@ impl<'a> Parser<'a> {
         if self.token == token::Comma {
             seen_comma = true;
         }
-        match self.token {
+        match self.token.kind {
             token::Comma => {
                 self.bump();
             }
@@ -6413,7 +6418,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Given a termination token, parses all of the items in a module.
-    fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
+    fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> {
         let mut items = vec![];
         while let Some(item) = self.parse_item()? {
             items.push(item);
@@ -6796,7 +6801,7 @@ impl<'a> Parser<'a> {
         let mut replacement = vec![];
         let mut fixed_crate_name = false;
         // Accept `extern crate name-like-this` for better diagnostics
-        let dash = token::Token::BinOp(token::BinOpToken::Minus);
+        let dash = token::BinOp(token::BinOpToken::Minus);
         if self.token == dash {  // Do not include `-` as part of the expected tokens list
             while self.eat(&dash) {
                 fixed_crate_name = true;
@@ -7011,7 +7016,7 @@ impl<'a> Parser<'a> {
     /// Parses a string as an ABI spec on an extern type or module. Consumes
     /// the `extern` keyword, if one is found.
     fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
-        match self.token {
+        match self.token.kind {
             token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
             token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
                 let sp = self.span;
@@ -7046,7 +7051,7 @@ impl<'a> Parser<'a> {
                 if token.is_keyword(kw::Move) {
                     return true;
                 }
-                match *token {
+                match token.kind {
                     token::BinOp(token::Or) | token::OrOr => true,
                     _ => false,
                 }
@@ -7818,7 +7823,7 @@ impl<'a> Parser<'a> {
     }
 
     pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
-        let ret = match self.token {
+        let ret = match self.token.kind {
             token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
                 (symbol, ast::StrStyle::Cooked, suffix),
             token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
@@ -7869,7 +7874,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
     for unmatched in unclosed_delims.iter() {
         let mut err = handler.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+            pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         err.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {
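
The parser hunks above all follow one shape: call sites that used to match on `self.token` now match on `self.token.kind`, and the span can be read from the same token instead of being tracked separately. A minimal sketch of that shape with stand-in types (not the real parser API):

    #[derive(Clone, Copy, Debug)]
    struct Span(u32, u32); // stand-in for syntax_pos::Span

    #[derive(Debug)]
    enum TokenKind { Comma, Semi, Eof }

    #[derive(Debug)]
    struct Token { kind: TokenKind, span: Span }

    struct Parser { token: Token }

    impl Parser {
        // The kind and span now travel together on `self.token`.
        fn describe(&self) -> (&'static str, Span) {
            let what = match self.token.kind {
                TokenKind::Comma => "comma",
                TokenKind::Semi => "semicolon",
                TokenKind::Eof => "end of file",
            };
            (what, self.token.span)
        }
    }

    fn main() {
        let p = Parser { token: Token { kind: TokenKind::Semi, span: Span(3, 4) } };
        println!("{:?}", p.describe()); // ("semicolon", Span(3, 4))
    }
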
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 47185df8d61..28a733728bf 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -2,22 +2,22 @@ pub use BinOpToken::*;
 pub use Nonterminal::*;
 pub use DelimToken::*;
 pub use LitKind::*;
-pub use Token::*;
+pub use TokenKind::*;
 
 use crate::ast::{self};
-use crate::parse::ParseSess;
+use crate::parse::{parse_stream_from_source_str, ParseSess};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::symbol::kw;
-use crate::syntax::parse::parse_stream_from_source_str;
 use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
 
-use syntax_pos::symbol::{self, Symbol};
-use syntax_pos::{self, Span, FileName};
+use syntax_pos::symbol::Symbol;
+use syntax_pos::{self, Span, FileName, DUMMY_SP};
 use log::info;
 
 use std::fmt;
 use std::mem;
+use std::ops::Deref;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
 use rustc_data_structures::sync::Lrc;
@@ -117,8 +117,8 @@ impl Lit {
     }
 }
 
-pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: Token = Ident(ident, is_raw);
+pub(crate) fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);
 
     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
@@ -145,11 +145,11 @@ pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
         kw::While,
         kw::Yield,
         kw::Static,
-    ].contains(&ident.name)
+    ].contains(&name)
 }
 
-fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: Token = Ident(ident, is_raw);
+fn ident_can_begin_type(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);
 
     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
@@ -162,11 +162,11 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
         kw::Extern,
         kw::Typeof,
         kw::Dyn,
-    ].contains(&ident.name)
+    ].contains(&name)
 }
 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
-pub enum Token {
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub enum TokenKind {
     /* Expression-operator symbols. */
     Eq,
     Lt,
@@ -209,8 +209,8 @@ pub enum Token {
     Literal(Lit),
 
     /* Name components */
-    Ident(ast::Ident, /* is_raw */ bool),
-    Lifetime(ast::Ident),
+    Ident(ast::Name, /* is_raw */ bool),
+    Lifetime(ast::Name),
 
     Interpolated(Lrc<Nonterminal>),
 
@@ -231,14 +231,20 @@ pub enum Token {
     Eof,
 }
 
-// `Token` is used a lot. Make sure it doesn't unintentionally get bigger.
+// `TokenKind` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
-static_assert_size!(Token, 16);
+static_assert_size!(TokenKind, 16);
 
-impl Token {
-    /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
-    pub fn from_ast_ident(ident: ast::Ident) -> Token {
-        Ident(ident, ident.is_raw_guess())
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub struct Token {
+    pub kind: TokenKind,
+    pub span: Span,
+}
+
+impl TokenKind {
+    /// Recovers a `TokenKind` from an `ast::Ident`. This creates a raw identifier if necessary.
+    pub fn from_ast_ident(ident: ast::Ident) -> TokenKind {
+        Ident(ident.name, ident.is_raw_guess())
     }
 
     crate fn is_like_plus(&self) -> bool {
@@ -247,12 +253,14 @@ impl Token {
             _ => false,
         }
     }
+}
 
+impl Token {
     /// Returns `true` if the token can appear at the start of an expression.
     crate fn can_begin_expr(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw)              =>
-                ident_can_begin_expr(ident, is_raw), // value name or keyword
+        match self.kind {
+            Ident(name, is_raw)              =>
+                ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
             OpenDelim(..)                     | // tuple, array or block
             Literal(..)                       | // literal
             Not                               | // operator not
@@ -282,9 +290,9 @@ impl Token {
 
     /// Returns `true` if the token can appear at the start of a type.
     crate fn can_begin_type(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw)        =>
-                ident_can_begin_type(ident, is_raw), // type name or keyword
+        match self.kind {
+            Ident(name, is_raw)        =>
+                ident_can_begin_type(name, self.span, is_raw), // type name or keyword
             OpenDelim(Paren)            | // tuple
             OpenDelim(Bracket)          | // array
             Not                         | // never
@@ -302,7 +310,9 @@ impl Token {
             _ => false,
         }
     }
+}
 
+impl TokenKind {
     /// Returns `true` if the token can appear at the start of a const param.
     pub fn can_begin_const_arg(&self) -> bool {
         match self {
@@ -316,14 +326,18 @@ impl Token {
             _ => self.can_begin_literal_or_bool(),
         }
     }
+}
 
+impl Token {
     /// Returns `true` if the token can appear at the start of a generic bound.
     crate fn can_begin_bound(&self) -> bool {
         self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
         self == &Question || self == &OpenDelim(Paren)
     }
+}
 
-    pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Token {
+impl TokenKind {
+    pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
         Literal(Lit::new(kind, symbol, suffix))
     }
 
@@ -348,8 +362,8 @@ impl Token {
         match *self {
             Literal(..)  => true,
             BinOp(Minus) => true,
-            Ident(ident, false) if ident.name == kw::True => true,
-            Ident(ident, false) if ident.name == kw::False => true,
+            Ident(name, false) if name == kw::True => true,
+            Ident(name, false) if name == kw::False => true,
             Interpolated(ref nt) => match **nt {
                 NtLiteral(..) => true,
                 _             => false,
@@ -357,11 +371,13 @@ impl Token {
             _            => false,
         }
     }
+}
 
+impl Token {
     /// Returns an identifier if this token is an identifier.
     pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
-        match *self {
-            Ident(ident, is_raw) => Some((ident, is_raw)),
+        match self.kind {
+            Ident(name, is_raw) => Some((ast::Ident::new(name, self.span), is_raw)),
             Interpolated(ref nt) => match **nt {
                 NtIdent(ident, is_raw) => Some((ident, is_raw)),
                 _ => None,
@@ -369,10 +385,11 @@ impl Token {
             _ => None,
         }
     }
+
     /// Returns a lifetime identifier if this token is a lifetime.
     pub fn lifetime(&self) -> Option<ast::Ident> {
-        match *self {
-            Lifetime(ident) => Some(ident),
+        match self.kind {
+            Lifetime(name) => Some(ast::Ident::new(name, self.span)),
             Interpolated(ref nt) => match **nt {
                 NtLifetime(ident) => Some(ident),
                 _ => None,
@@ -380,22 +397,44 @@ impl Token {
             _ => None,
         }
     }
+}
+
+impl TokenKind {
+    /// Returns an identifier name if this token is an identifier.
+    pub fn ident_name(&self) -> Option<(ast::Name, /* is_raw */ bool)> {
+        match *self {
+            Ident(name, is_raw) => Some((name, is_raw)),
+            Interpolated(ref nt) => match **nt {
+                NtIdent(ident, is_raw) => Some((ident.name, is_raw)),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+    /// Returns a lifetime name if this token is a lifetime.
+    pub fn lifetime_name(&self) -> Option<ast::Name> {
+        match *self {
+            Lifetime(name) => Some(name),
+            Interpolated(ref nt) => match **nt {
+                NtLifetime(ident) => Some(ident.name),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
     /// Returns `true` if the token is an identifier.
     pub fn is_ident(&self) -> bool {
-        self.ident().is_some()
+        self.ident_name().is_some()
     }
     /// Returns `true` if the token is a lifetime.
     crate fn is_lifetime(&self) -> bool {
-        self.lifetime().is_some()
+        self.lifetime_name().is_some()
     }
 
     /// Returns `true` if the token is an identifier whose name is the given
     /// string slice.
     crate fn is_ident_named(&self, name: Symbol) -> bool {
-        match self.ident() {
-            Some((ident, _)) => ident.name == name,
-            None => false
-        }
+        self.ident_name().map_or(false, |(ident_name, _)| ident_name == name)
     }
 
     /// Returns `true` if the token is an interpolated path.
@@ -417,24 +456,30 @@ impl Token {
     crate fn is_qpath_start(&self) -> bool {
         self == &Lt || self == &BinOp(Shl)
     }
+}
 
+impl Token {
     crate fn is_path_start(&self) -> bool {
         self == &ModSep || self.is_qpath_start() || self.is_path() ||
         self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
     }
+}
 
+impl TokenKind {
     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: Symbol) -> bool {
-        self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false)
+        self.ident_name().map(|(name, is_raw)| name == kw && !is_raw).unwrap_or(false)
     }
 
     pub fn is_path_segment_keyword(&self) -> bool {
-        match self.ident() {
-            Some((id, false)) => id.is_path_segment_keyword(),
+        match self.ident_name() {
+            Some((name, false)) => name.is_path_segment_keyword(),
             _ => false,
         }
     }
+}
 
+impl Token {
     // Returns true for reserved identifiers used internally for elided lifetimes,
     // unnamed method parameters, crate root module, error recovery etc.
     pub fn is_special_ident(&self) -> bool {
@@ -467,8 +512,10 @@ impl Token {
             _ => false,
         }
     }
+}
 
-    crate fn glue(self, joint: Token) -> Option<Token> {
+impl TokenKind {
+    crate fn glue(self, joint: TokenKind) -> Option<TokenKind> {
         Some(match self {
             Eq => match joint {
                 Eq => EqEq,
@@ -514,13 +561,7 @@ impl Token {
                 _ => return None,
             },
             SingleQuote => match joint {
-                Ident(ident, false) => {
-                    let name = Symbol::intern(&format!("'{}", ident));
-                    Lifetime(symbol::Ident {
-                        name,
-                        span: ident.span,
-                    })
-                }
+                Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))),
                 _ => return None,
             },
 
@@ -534,7 +575,7 @@ impl Token {
 
     /// Returns tokens that are likely to be typed accidentally instead of the current token.
     /// Enables better error recovery when the wrong token is found.
-    crate fn similar_tokens(&self) -> Option<Vec<Token>> {
+    crate fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
         match *self {
             Comma => Some(vec![Dot, Lt, Semi]),
             Semi => Some(vec![Colon, Comma]),
@@ -544,7 +585,7 @@ impl Token {
 
     // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
-    crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
+    crate fn probably_equal_for_proc_macro(&self, other: &TokenKind) -> bool {
         if mem::discriminant(self) != mem::discriminant(other) {
             return false
         }
@@ -590,10 +631,10 @@ impl Token {
 
             (&Literal(a), &Literal(b)) => a == b,
 
-            (&Lifetime(a), &Lifetime(b)) => a.name == b.name,
-            (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
-                                                       a.name == kw::DollarCrate ||
-                                                       c.name == kw::DollarCrate),
+            (&Lifetime(a), &Lifetime(b)) => a == b,
+            (&Ident(a, b), &Ident(c, d)) => b == d && (a == c ||
+                                                       a == kw::DollarCrate ||
+                                                       c == kw::DollarCrate),
 
             (&Interpolated(_), &Interpolated(_)) => false,
 
@@ -602,6 +643,36 @@ impl Token {
     }
 }
 
+impl Token {
+    crate fn new(kind: TokenKind, span: Span) -> Self {
+        Token { kind, span }
+    }
+
+    /// Some token that will be thrown away later.
+    crate fn dummy() -> Self {
+        Token::new(TokenKind::Whitespace, DUMMY_SP)
+    }
+
+    /// Return this token by value and leave a dummy token in its place.
+    crate fn take(&mut self) -> Self {
+        mem::replace(self, Token::dummy())
+    }
+}
+
+impl PartialEq<TokenKind> for Token {
+    fn eq(&self, rhs: &TokenKind) -> bool {
+        self.kind == *rhs
+    }
+}
+
+// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
+impl Deref for Token {
+    type Target = TokenKind;
+    fn deref(&self) -> &Self::Target {
+        &self.kind
+    }
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable)]
 /// For interpolation during macro expansion.
 pub enum Nonterminal {
@@ -691,12 +762,10 @@ impl Nonterminal {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Token::Ident(ident, is_raw);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
             }
             Nonterminal::NtLifetime(ident) => {
-                let token = Token::Lifetime(ident);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
             }
             Nonterminal::NtTT(ref tt) => {
                 Some(tt.clone().into())
@@ -743,7 +812,7 @@ impl Nonterminal {
     }
 }
 
-crate fn is_op(tok: &Token) -> bool {
+crate fn is_op(tok: &TokenKind) -> bool {
     match *tok {
         OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
         Ident(..) | Lifetime(..) | Interpolated(..) |
@@ -781,8 +850,8 @@ fn prepend_attrs(sess: &ParseSess,
         // For simple paths, push the identifier directly
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
-            let token = Ident(ident, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::Token(ident.span, token));
+            let token = Ident(ident.name, ident.as_str().starts_with("r#"));
+            brackets.push(tokenstream::TokenTree::token(token, ident.span));
 
         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
@@ -796,7 +865,7 @@ fn prepend_attrs(sess: &ParseSess,
         // The span we list here for `#` and for `[ ... ]` are both wrong in
         // that it encompasses more than each token, but it hopefully is "good
         // enough" for now at least.
-        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(Pound, attr.span));
         let delim_span = DelimSpan::from_single(attr.span);
         builder.push(tokenstream::TokenTree::Delimited(
             delim_span, DelimToken::Bracket, brackets.build().into()));
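
The new `Token { kind, span }` struct, the `PartialEq<TokenKind>` impl, and the temporary `Deref` shim are what keep most call sites compiling during the migration. A minimal self-contained model with stand-in types (not the real `syntax::parse::token` definitions):

    use std::ops::Deref;

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Span(u32, u32); // stand-in for syntax_pos::Span

    #[derive(Clone, PartialEq, Debug)]
    enum TokenKind {
        Semi,
        Ident(String, /* is_raw */ bool),
    }

    #[derive(Clone, PartialEq, Debug)]
    struct Token {
        kind: TokenKind,
        span: Span,
    }

    impl TokenKind {
        fn is_ident(&self) -> bool {
            match self {
                TokenKind::Ident(..) => true,
                _ => false,
            }
        }
    }

    // Comparing a token against a bare kind ignores the span.
    impl PartialEq<TokenKind> for Token {
        fn eq(&self, rhs: &TokenKind) -> bool {
            self.kind == *rhs
        }
    }

    // Migration shim: `TokenKind` methods stay callable on `Token`.
    impl Deref for Token {
        type Target = TokenKind;
        fn deref(&self) -> &TokenKind {
            &self.kind
        }
    }

    fn main() {
        let token = Token { kind: TokenKind::Ident("x".into(), false), span: Span(0, 1) };
        assert!(token.is_ident());         // resolved through Deref
        assert!(token != TokenKind::Semi); // PartialEq<TokenKind> for Token
    }

Once all the necessary methods migrate from `TokenKind` to `Token`, the `Deref` shim can be removed, as the FIXME above notes.
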
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 57c01e9e3ef..07acfb5dc86 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -6,7 +6,7 @@ use crate::ast::{Attribute, MacDelimiter, GenericArg};
 use crate::util::parser::{self, AssocOp, Fixity};
 use crate::attr;
 use crate::source_map::{self, SourceMap, Spanned};
-use crate::parse::token::{self, BinOpToken, Nonterminal, Token};
+use crate::parse::token::{self, BinOpToken, Nonterminal, TokenKind};
 use crate::parse::lexer::comments;
 use crate::parse::{self, ParseSess};
 use crate::print::pp::{self, Breaks};
@@ -189,7 +189,7 @@ pub fn literal_to_string(lit: token::Lit) -> String {
     out
 }
 
-pub fn token_to_string(tok: &Token) -> String {
+pub fn token_to_string(tok: &TokenKind) -> String {
     match *tok {
         token::Eq                   => "=".to_string(),
         token::Lt                   => "<".to_string(),
@@ -724,10 +724,10 @@ pub trait PrintState<'a> {
     /// expression arguments as expressions). It can be done! I think.
     fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
         match tt {
-            TokenTree::Token(_, ref tk) => {
-                self.writer().word(token_to_string(tk))?;
-                match *tk {
-                    parse::token::DocComment(..) => {
+            TokenTree::Token(ref token) => {
+                self.writer().word(token_to_string(&token))?;
+                match token.kind {
+                    token::DocComment(..) => {
                         self.writer().hardbreak()
                     }
                     _ => Ok(())
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 397fb45513c..9dea3a4dcc1 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -16,7 +16,7 @@
 use crate::ext::base;
 use crate::ext::tt::{macro_parser, quoted};
 use crate::parse::Directory;
-use crate::parse::token::{self, DelimToken, Token};
+use crate::parse::token::{self, DelimToken, Token, TokenKind};
 use crate::print::pprust;
 
 use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
@@ -44,7 +44,7 @@ use std::{fmt, iter, mem};
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
     /// A single token
-    Token(Span, token::Token),
+    Token(Token),
     /// A delimited sequence of token trees
     Delimited(DelimSpan, DelimToken, TokenStream),
 }
@@ -53,8 +53,7 @@ pub enum TokenTree {
 #[cfg(parallel_compiler)]
 fn _dummy()
 where
-    Span: Send + Sync,
-    token::Token: Send + Sync,
+    Token: Send + Sync,
     DelimSpan: Send + Sync,
     DelimToken: Send + Sync,
     TokenStream: Send + Sync,
@@ -86,12 +85,11 @@ impl TokenTree {
     /// Checks if this TokenTree is equal to the other, regardless of span information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
-            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
-            (&TokenTree::Delimited(_, delim, ref tts),
-             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+            (TokenTree::Token(token), TokenTree::Token(token2)) => token.kind == token2.kind,
+            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                 delim == delim2 && tts.eq_unspanned(&tts2)
             }
-            (_, _) => false,
+            _ => false,
         }
     }
 
@@ -102,37 +100,36 @@ impl TokenTree {
     // different method.
     pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool {
         match (self, other) {
-            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => {
-                tk.probably_equal_for_proc_macro(tk2)
+            (TokenTree::Token(token), TokenTree::Token(token2)) => {
+                token.probably_equal_for_proc_macro(token2)
             }
-            (&TokenTree::Delimited(_, delim, ref tts),
-             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                 delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
             }
-            (_, _) => false,
+            _ => false,
         }
     }
 
     /// Retrieves the TokenTree's span.
     pub fn span(&self) -> Span {
-        match *self {
-            TokenTree::Token(sp, _) => sp,
+        match self {
+            TokenTree::Token(token) => token.span,
             TokenTree::Delimited(sp, ..) => sp.entire(),
         }
     }
 
     /// Modify the `TokenTree`'s span in-place.
     pub fn set_span(&mut self, span: Span) {
-        match *self {
-            TokenTree::Token(ref mut sp, _) => *sp = span,
-            TokenTree::Delimited(ref mut sp, ..) => *sp = DelimSpan::from_single(span),
+        match self {
+            TokenTree::Token(token) => token.span = span,
+            TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span),
         }
     }
 
     /// Indicates if the stream is a token that is equal to the provided token.
-    pub fn eq_token(&self, t: Token) -> bool {
-        match *self {
-            TokenTree::Token(_, ref tk) => *tk == t,
+    pub fn eq_token(&self, t: TokenKind) -> bool {
+        match self {
+            TokenTree::Token(token) => *token == t,
             _ => false,
         }
     }
@@ -141,6 +138,10 @@ impl TokenTree {
         TokenStream::new(vec![(self, Joint)])
     }
 
+    pub fn token(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span))
+    }
+
     /// Returns the opening delimiter as a token tree.
     pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
         let open_span = if span.is_dummy() {
@@ -148,7 +149,7 @@ impl TokenTree {
         } else {
             span.with_hi(span.lo() + BytePos(delim.len() as u32))
         };
-        TokenTree::Token(open_span, token::OpenDelim(delim))
+        TokenTree::token(token::OpenDelim(delim), open_span)
     }
 
     /// Returns the closing delimiter as a token tree.
@@ -158,7 +159,7 @@ impl TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(delim.len() as u32))
         };
-        TokenTree::Token(close_span, token::CloseDelim(delim))
+        TokenTree::token(token::CloseDelim(delim), close_span)
     }
 }
 
@@ -167,7 +168,7 @@ impl TokenTree {
 /// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s.
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
-/// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat.
+/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
 ///
 /// The use of `Option` is an optimization that avoids the need for an
 /// allocation when the stream is empty. However, it is not guaranteed that an
@@ -201,18 +202,18 @@ impl TokenStream {
             while let Some((pos, ts)) = iter.next() {
                 if let Some((_, next)) = iter.peek() {
                     let sp = match (&ts, &next) {
-                        (_, (TokenTree::Token(_, token::Token::Comma), _)) => continue,
-                        ((TokenTree::Token(sp, token_left), NonJoint),
-                         (TokenTree::Token(_, token_right), _))
+                        (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                        ((TokenTree::Token(token_left), NonJoint),
+                         (TokenTree::Token(token_right), _))
                         if ((token_left.is_ident() && !token_left.is_reserved_ident())
                             || token_left.is_lit()) &&
                             ((token_right.is_ident() && !token_right.is_reserved_ident())
-                            || token_right.is_lit()) => *sp,
+                            || token_right.is_lit()) => token_left.span,
                         ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
                         _ => continue,
                     };
                     let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::Token(sp, token::Comma), NonJoint);
+                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
                     suggestion = Some((pos, comma, sp));
                 }
             }
@@ -241,12 +242,6 @@ impl From<TokenTree> for TreeAndJoint {
     }
 }
 
-impl From<Token> for TokenStream {
-    fn from(token: Token) -> TokenStream {
-        TokenTree::Token(DUMMY_SP, token).into()
-    }
-}
-
 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
         TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
@@ -349,22 +344,25 @@ impl TokenStream {
         // streams, making a comparison between a token stream generated from an
         // AST and a token stream which was parsed into an AST more reliable.
         fn semantic_tree(tree: &TokenTree) -> bool {
-            match tree {
-                // The pretty printer tends to add trailing commas to
-                // everything, and in particular, after struct fields.
-                | TokenTree::Token(_, Token::Comma)
-                // The pretty printer emits `NoDelim` as whitespace.
-                | TokenTree::Token(_, Token::OpenDelim(DelimToken::NoDelim))
-                | TokenTree::Token(_, Token::CloseDelim(DelimToken::NoDelim))
-                // The pretty printer collapses many semicolons into one.
-                | TokenTree::Token(_, Token::Semi)
-                // The pretty printer collapses whitespace arbitrarily and can
-                // introduce whitespace from `NoDelim`.
-                | TokenTree::Token(_, Token::Whitespace)
-                // The pretty printer can turn `$crate` into `::crate_name`
-                | TokenTree::Token(_, Token::ModSep) => false,
-                _ => true
+            if let TokenTree::Token(token) = tree {
+                if let
+                    // The pretty printer tends to add trailing commas to
+                    // everything, and in particular, after struct fields.
+                    | token::Comma
+                    // The pretty printer emits `NoDelim` as whitespace.
+                    | token::OpenDelim(DelimToken::NoDelim)
+                    | token::CloseDelim(DelimToken::NoDelim)
+                    // The pretty printer collapses many semicolons into one.
+                    | token::Semi
+                    // The pretty printer collapses whitespace arbitrarily and can
+                    // introduce whitespace from `NoDelim`.
+                    | token::Whitespace
+                    // The pretty printer can turn `$crate` into `::crate_name`
+                    | token::ModSep = token.kind {
+                    return false;
+                }
             }
+            true
         }
 
         let mut t1 = self.trees().filter(semantic_tree);
@@ -430,13 +428,13 @@ impl TokenStreamBuilder {
     pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
         let stream = stream.into();
         let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
-        if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint {
-            if let Some((TokenTree::Token(span, tok), is_joint)) = stream.first_tree_and_joint() {
-                if let Some(glued_tok) = last_tok.glue(tok) {
+        if let Some(TokenTree::Token(last_token)) = last_tree_if_joint {
+            if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() {
+                if let Some(glued_tok) = last_token.kind.glue(token.kind) {
                     let last_stream = self.0.pop().unwrap();
                     self.push_all_but_last_tree(&last_stream);
-                    let glued_span = last_span.to(span);
-                    let glued_tt = TokenTree::Token(glued_span, glued_tok);
+                    let glued_span = last_token.span.to(token.span);
+                    let glued_tt = TokenTree::token(glued_tok, glued_span);
                     let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
                     self.0.push(glued_tokenstream);
                     self.push_all_but_first_tree(&stream);
@@ -578,9 +576,8 @@ impl DelimSpan {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::syntax::ast::Ident;
+    use crate::ast::Name;
     use crate::with_default_globals;
-    use crate::parse::token::Token;
     use crate::util::parser_testing::string_to_stream;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
 
@@ -664,7 +661,7 @@ mod tests {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"), false)).into();
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(0, 1)).into();
             let test2 = string_to_ts("foo(bar::baz)");
 
             assert_eq!(test0.is_empty(), true);
@@ -677,9 +674,9 @@ mod tests {
     fn test_dotdotdot() {
         with_default_globals(|| {
             let mut builder = TokenStreamBuilder::new();
-            builder.push(TokenTree::Token(sp(0, 1), Token::Dot).joint());
-            builder.push(TokenTree::Token(sp(1, 2), Token::Dot).joint());
-            builder.push(TokenTree::Token(sp(2, 3), Token::Dot));
+            builder.push(TokenTree::token(token::Dot, sp(0, 1)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(1, 2)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(2, 3)));
             let stream = builder.build();
             assert!(stream.eq_unspanned(&string_to_ts("...")));
             assert_eq!(stream.trees().count(), 1);
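
`TokenTree::Token` now wraps a whole `Token` instead of a `(Span, TokenKind)` pair, and the new `TokenTree::token(kind, span)` constructor keeps call sites as terse as before. A stand-in sketch (simplified types; the `Delimited` variant is elided):

    #[derive(Clone, Copy, Debug)]
    struct Span(u32, u32); // stand-in for syntax_pos::Span

    #[derive(Debug)]
    enum TokenKind { Dot }

    #[derive(Debug)]
    struct Token { kind: TokenKind, span: Span }

    #[derive(Debug)]
    enum TokenTree {
        // Was `Token(Span, TokenKind)`; the pair is now a single struct.
        Token(Token),
    }

    impl TokenTree {
        // Mirrors the constructor added above.
        fn token(kind: TokenKind, span: Span) -> TokenTree {
            TokenTree::Token(Token { kind, span })
        }

        // The span no longer lives in a separate tuple field.
        fn span(&self) -> Span {
            match self {
                TokenTree::Token(token) => token.span,
            }
        }
    }

    fn main() {
        let tt = TokenTree::token(TokenKind::Dot, Span(0, 1));
        println!("{:?} at {:?}", tt, tt.span());
    }
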
diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs
index 7e306d59e35..9e26f1bf7d3 100644
--- a/src/libsyntax/util/parser.rs
+++ b/src/libsyntax/util/parser.rs
@@ -1,4 +1,4 @@
-use crate::parse::token::{Token, BinOpToken};
+use crate::parse::token::{self, TokenKind, BinOpToken};
 use crate::symbol::kw;
 use crate::ast::{self, BinOpKind};
 
@@ -69,34 +69,34 @@ pub enum Fixity {
 
 impl AssocOp {
     /// Creates a new AssocOp from a token kind
-    pub fn from_token(t: &Token) -> Option<AssocOp> {
+    pub fn from_token(t: &TokenKind) -> Option<AssocOp> {
         use AssocOp::*;
         match *t {
-            Token::BinOpEq(k) => Some(AssignOp(k)),
-            Token::Eq => Some(Assign),
-            Token::BinOp(BinOpToken::Star) => Some(Multiply),
-            Token::BinOp(BinOpToken::Slash) => Some(Divide),
-            Token::BinOp(BinOpToken::Percent) => Some(Modulus),
-            Token::BinOp(BinOpToken::Plus) => Some(Add),
-            Token::BinOp(BinOpToken::Minus) => Some(Subtract),
-            Token::BinOp(BinOpToken::Shl) => Some(ShiftLeft),
-            Token::BinOp(BinOpToken::Shr) => Some(ShiftRight),
-            Token::BinOp(BinOpToken::And) => Some(BitAnd),
-            Token::BinOp(BinOpToken::Caret) => Some(BitXor),
-            Token::BinOp(BinOpToken::Or) => Some(BitOr),
-            Token::Lt => Some(Less),
-            Token::Le => Some(LessEqual),
-            Token::Ge => Some(GreaterEqual),
-            Token::Gt => Some(Greater),
-            Token::EqEq => Some(Equal),
-            Token::Ne => Some(NotEqual),
-            Token::AndAnd => Some(LAnd),
-            Token::OrOr => Some(LOr),
-            Token::DotDot => Some(DotDot),
-            Token::DotDotEq => Some(DotDotEq),
+            token::BinOpEq(k) => Some(AssignOp(k)),
+            token::Eq => Some(Assign),
+            token::BinOp(BinOpToken::Star) => Some(Multiply),
+            token::BinOp(BinOpToken::Slash) => Some(Divide),
+            token::BinOp(BinOpToken::Percent) => Some(Modulus),
+            token::BinOp(BinOpToken::Plus) => Some(Add),
+            token::BinOp(BinOpToken::Minus) => Some(Subtract),
+            token::BinOp(BinOpToken::Shl) => Some(ShiftLeft),
+            token::BinOp(BinOpToken::Shr) => Some(ShiftRight),
+            token::BinOp(BinOpToken::And) => Some(BitAnd),
+            token::BinOp(BinOpToken::Caret) => Some(BitXor),
+            token::BinOp(BinOpToken::Or) => Some(BitOr),
+            token::Lt => Some(Less),
+            token::Le => Some(LessEqual),
+            token::Ge => Some(GreaterEqual),
+            token::Gt => Some(Greater),
+            token::EqEq => Some(Equal),
+            token::Ne => Some(NotEqual),
+            token::AndAnd => Some(LAnd),
+            token::OrOr => Some(LOr),
+            token::DotDot => Some(DotDot),
+            token::DotDotEq => Some(DotDotEq),
             // DotDotDot is no longer supported, but we need some way to display the error
-            Token::DotDotDot => Some(DotDotEq),
-            Token::Colon => Some(Colon),
+            token::DotDotDot => Some(DotDotEq),
+            token::Colon => Some(Colon),
             _ if t.is_keyword(kw::As) => Some(As),
             _ => None
         }
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 334709b1521..4e6a8274a47 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -855,7 +855,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
 
 pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
     match tt {
-        TokenTree::Token(_, tok) => visitor.visit_token(tok),
+        TokenTree::Token(token) => visitor.visit_token(token),
         TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
     }
 }
diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs
index 4d7083c1a79..b015815ac9c 100644
--- a/src/libsyntax_ext/asm.rs
+++ b/src/libsyntax_ext/asm.rs
@@ -9,7 +9,8 @@ use errors::DiagnosticBuilder;
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::{self, token};
+use syntax::parse;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::ast::AsmDialect;
@@ -86,8 +87,8 @@ fn parse_inline_asm<'a>(
     let first_colon = tts.iter()
         .position(|tt| {
             match *tt {
-                tokenstream::TokenTree::Token(_, token::Colon) |
-                tokenstream::TokenTree::Token(_, token::ModSep) => true,
+                tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
+                tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
                 _ => false,
             }
         })
@@ -259,7 +260,7 @@ fn parse_inline_asm<'a>(
         loop {
             // MOD_SEP is a double colon '::' without space in between.
             // When encountered, the state must be advanced twice.
-            match (&p.token, state.next(), state.next().next()) {
+            match (&p.token.kind, state.next(), state.next().next()) {
                 (&token::Colon, StateNone, _) |
                 (&token::ModSep, _, StateNone) => {
                     p.bump();
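
Where a tuple pattern like `TokenTree::Token(_, token::Colon)` used to ignore the span positionally, the struct pattern `TokenTree::Token(Token { kind: token::Colon, .. })` now ignores it with `..`. A stand-in sketch of the pattern:

    #[derive(Clone, Copy, Debug)]
    struct Span(u32, u32); // stand-in for syntax_pos::Span

    #[derive(Debug)]
    enum TokenKind { Colon, ModSep, Comma }

    #[derive(Debug)]
    struct Token { kind: TokenKind, span: Span }

    #[derive(Debug)]
    enum TokenTree { Token(Token) }

    // `..` skips the span field, as `_` skipped the span tuple slot before.
    fn is_colon_or_modsep(tt: &TokenTree) -> bool {
        match tt {
            TokenTree::Token(Token { kind: TokenKind::Colon, .. }) |
            TokenTree::Token(Token { kind: TokenKind::ModSep, .. }) => true,
            _ => false,
        }
    }

    fn main() {
        assert!(is_colon_or_modsep(
            &TokenTree::Token(Token { kind: TokenKind::ModSep, span: Span(0, 2) })));
        assert!(!is_colon_or_modsep(
            &TokenTree::Token(Token { kind: TokenKind::Comma, span: Span(2, 3) })));
    }
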
diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs
index 13342c8e28e..3886528c74c 100644
--- a/src/libsyntax_ext/assert.rs
+++ b/src/libsyntax_ext/assert.rs
@@ -4,7 +4,7 @@ use syntax::ast::{self, *};
 use syntax::source_map::Spanned;
 use syntax::ext::base::*;
 use syntax::ext::build::AstBuilder;
-use syntax::parse::token::{self, Token};
+use syntax::parse::token::{self, TokenKind};
 use syntax::parse::parser::Parser;
 use syntax::print::pprust;
 use syntax::ptr::P;
@@ -29,12 +29,12 @@ pub fn expand_assert<'cx>(
     let panic_call = Mac_ {
         path: Path::from_ident(Ident::new(sym::panic, sp)),
         tts: custom_message.unwrap_or_else(|| {
-            TokenStream::from(TokenTree::Token(
-                DUMMY_SP,
-                Token::lit(token::Str, Symbol::intern(&format!(
+            TokenStream::from(TokenTree::token(
+                TokenKind::lit(token::Str, Symbol::intern(&format!(
                     "assertion failed: {}",
                     pprust::expr_to_string(&cond_expr).escape_debug()
                 )), None),
+                DUMMY_SP,
             ))
         }).into(),
         delim: MacDelimiter::Parenthesis,
@@ -103,7 +103,8 @@ fn parse_assert<'a>(
     //
     // Parse this as an actual message, and suggest inserting a comma. Eventually, this should be
     // turned into an error.
-    let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. }) = parser.token {
+    let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. })
+                                = parser.token.kind {
         let mut err = cx.struct_span_warn(parser.span, "unexpected string literal");
         let comma_span = cx.source_map().next_point(parser.prev_span);
         err.span_suggestion_short(
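
`TokenKind::lit(kind, symbol, suffix)` builds the literal kind and `TokenTree::token` attaches the span, which is why the `DUMMY_SP` argument moves to the end above. A stand-in sketch (simplified types; the real `Lit` uses interned `Symbol`s):

    #[derive(Clone, Copy, Debug)]
    struct Span; // stand-in for DUMMY_SP

    #[derive(Debug)]
    enum LitKind { Str }

    #[derive(Debug)]
    struct Lit { kind: LitKind, symbol: String, suffix: Option<String> }

    #[derive(Debug)]
    enum TokenKind { Literal(Lit) }

    #[derive(Debug)]
    struct Token { kind: TokenKind, span: Span }

    #[derive(Debug)]
    enum TokenTree { Token(Token) }

    impl TokenKind {
        // Mirrors `TokenKind::lit(kind, symbol, suffix)`.
        fn lit(kind: LitKind, symbol: String, suffix: Option<String>) -> TokenKind {
            TokenKind::Literal(Lit { kind, symbol, suffix })
        }
    }

    impl TokenTree {
        fn token(kind: TokenKind, span: Span) -> TokenTree {
            TokenTree::Token(Token { kind, span })
        }
    }

    fn main() {
        let msg = format!("assertion failed: {}", "cond");
        let tt = TokenTree::token(TokenKind::lit(LitKind::Str, msg, None), Span);
        println!("{:?}", tt);
    }
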
diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs
index 77c53f402cc..8f061abc77b 100644
--- a/src/libsyntax_ext/concat_idents.rs
+++ b/src/libsyntax_ext/concat_idents.rs
@@ -3,7 +3,7 @@ use rustc_data_structures::thin_vec::ThinVec;
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax_pos::Span;
 use syntax_pos::symbol::{Symbol, sym};
@@ -30,7 +30,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                TokenTree::Token(_, token::Comma) => {}
+                TokenTree::Token(Token { kind: token::Comma, .. }) => {}
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::any(sp);
@@ -38,8 +38,8 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
             }
         } else {
             match *e {
-                TokenTree::Token(_, token::Ident(ident, _)) =>
-                    res_str.push_str(&ident.as_str()),
+                TokenTree::Token(Token { kind: token::Ident(name, _), .. }) =>
+                    res_str.push_str(&name.as_str()),
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");
                     return DummyResult::any(sp);
diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs
index 975d96951dc..98465d75e46 100644
--- a/src/libsyntax_ext/deriving/custom.rs
+++ b/src/libsyntax_ext/deriving/custom.rs
@@ -8,7 +8,7 @@ use syntax::attr::{mark_used, mark_known};
 use syntax::source_map::Span;
 use syntax::ext::base::*;
 use syntax::parse;
-use syntax::parse::token::{self, Token};
+use syntax::parse::token;
 use syntax::tokenstream;
 use syntax::visit::Visitor;
 use syntax_pos::DUMMY_SP;
@@ -68,8 +68,8 @@ impl MultiItemModifier for ProcMacroDerive {
         // Mark attributes as known, and used.
         MarkAttrs(&self.attrs).visit_item(&item);
 
-        let token = Token::Interpolated(Lrc::new(token::NtItem(item)));
-        let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
+        let token = token::Interpolated(Lrc::new(token::NtItem(item)));
+        let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();
 
         let server = proc_macro_server::Rustc::new(ecx);
         let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs
index b5be45547cf..c78215b77a9 100644
--- a/src/libsyntax_ext/format.rs
+++ b/src/libsyntax_ext/format.rs
@@ -149,16 +149,16 @@ fn parse_args<'a>(
         } // accept trailing commas
         if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
             named = true;
-            let ident = if let token::Ident(i, _) = p.token {
+            let name = if let token::Ident(name, _) = p.token.kind {
                 p.bump();
-                i
+                name
             } else {
                 return Err(ecx.struct_span_err(
                     p.span,
                     "expected ident, positional arguments cannot follow named arguments",
                 ));
             };
-            let name: &str = &ident.as_str();
+            let name: &str = &name.as_str();
 
             p.expect(&token::Eq)?;
             let e = p.parse_expr()?;
diff --git a/src/libsyntax_ext/proc_macro_decls.rs b/src/libsyntax_ext/proc_macro_decls.rs
index de8b689396f..29297aa913e 100644
--- a/src/libsyntax_ext/proc_macro_decls.rs
+++ b/src/libsyntax_ext/proc_macro_decls.rs
@@ -132,7 +132,7 @@ impl<'a> CollectProcMacros<'a> {
             }
         };
 
-        if !trait_ident.can_be_raw() {
+        if !trait_ident.name.can_be_raw() {
             self.handler.span_err(trait_attr.span,
                                   &format!("`{}` cannot be a name of derive macro", trait_ident));
         }
@@ -166,7 +166,7 @@ impl<'a> CollectProcMacros<'a> {
                         return None;
                     }
                 };
-                if !ident.can_be_raw() {
+                if !ident.name.can_be_raw() {
                     self.handler.span_err(
                         attr.span,
                         &format!("`{}` cannot be a name of derive helper attribute", ident),
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index cc05ecf8df5..00a420d3fa8 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -55,7 +55,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
         use syntax::parse::token::*;
 
         let joint = is_joint == Joint;
-        let (span, token) = match tree {
+        let Token { kind, span } = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
                 let delimiter = Delimiter::from_internal(delim);
                 return TokenTree::Group(Group {
@@ -64,7 +64,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                     span,
                 });
             }
-            tokenstream::TokenTree::Token(span, token) => (span, token),
+            tokenstream::TokenTree::Token(token) => token,
         };
 
         macro_rules! tt {
@@ -93,7 +93,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
             }};
         }
 
-        match token {
+        match kind {
             Eq => op!('='),
             Lt => op!('<'),
             Le => op!('<', '='),
@@ -142,11 +142,10 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
             Question => op!('?'),
             SingleQuote => op!('\''),
 
-            Ident(ident, false) if ident.name == kw::DollarCrate =>
-                tt!(Ident::dollar_crate()),
-            Ident(ident, is_raw) => tt!(Ident::new(ident.name, is_raw)),
-            Lifetime(ident) => {
-                let ident = ident.without_first_quote();
+            Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
+            Ident(name, is_raw) => tt!(Ident::new(name, is_raw)),
+            Lifetime(name) => {
+                let ident = ast::Ident::new(name, span).without_first_quote();
                 stack.push(tt!(Ident::new(ident.name, false)));
                 tt!(Punct::new('\'', true))
             }
@@ -159,12 +158,12 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                     escaped.extend(ch.escape_debug());
                 }
                 let stream = vec![
-                    Ident(ast::Ident::new(sym::doc, span), false),
+                    Ident(sym::doc, false),
                     Eq,
-                    Token::lit(token::Str, Symbol::intern(&escaped), None),
+                    TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                 ]
                 .into_iter()
-                .map(|token| tokenstream::TokenTree::Token(span, token))
+                .map(|kind| tokenstream::TokenTree::token(kind, span))
                 .collect();
                 stack.push(TokenTree::Group(Group {
                     delimiter: Delimiter::Bracket,
@@ -211,8 +210,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                 .into();
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
-                let token = Ident(ast::Ident::new(sym, span), is_raw);
-                return tokenstream::TokenTree::Token(span, token).into();
+                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },
@@ -220,9 +218,9 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
             }) if symbol.as_str().starts_with("-") => {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
-                let integer = Token::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, integer);
+                let integer = TokenKind::lit(token::Integer, symbol, suffix);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(integer, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal {
@@ -231,17 +229,17 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
             }) if symbol.as_str().starts_with("-") => {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
-                let float = Token::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, float);
+                let float = TokenKind::lit(token::Float, symbol, suffix);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(float, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal { lit, span }) => {
-                return tokenstream::TokenTree::Token(span, Literal(lit)).into()
+                return tokenstream::TokenTree::token(Literal(lit), span).into()
             }
         };
 
-        let token = match ch {
+        let kind = match ch {
             '=' => Eq,
             '<' => Lt,
             '>' => Gt,
@@ -267,7 +265,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
             _ => unreachable!(),
         };
 
-        let tree = tokenstream::TokenTree::Token(span, token);
+        let tree = tokenstream::TokenTree::token(kind, span);
         TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
     }
 }
@@ -338,7 +336,8 @@ impl Ident {
         if !Self::is_valid(&string) {
             panic!("`{:?}` is not a valid identifier", string)
         }
-        if is_raw && !ast::Ident::from_interned_str(sym.as_interned_str()).can_be_raw() {
+        // Get rid of gensyms to conservatively check rawness on the string contents only.
+        if is_raw && !sym.as_interned_str().as_symbol().can_be_raw() {
             panic!("`{}` cannot be a raw identifier", string);
         }
         Ident { sym, is_raw, span }
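
Because the tree now yields a whole `Token`, a single `let Token { kind, span } = ...` binding replaces the old `(span, token)` tuple plumbing. A stand-in sketch:

    #[derive(Clone, Copy, Debug)]
    struct Span(u32, u32); // stand-in for syntax_pos::Span

    #[derive(Debug)]
    enum TokenKind { Question }

    #[derive(Debug)]
    struct Token { kind: TokenKind, span: Span }

    fn describe(token: Token) -> String {
        // One destructuring yields both halves of the token.
        let Token { kind, span } = token;
        format!("{:?} at {:?}", kind, span)
    }

    fn main() {
        let t = Token { kind: TokenKind::Question, span: Span(5, 6) };
        println!("{}", describe(t));
    }
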
diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs
index 61ef94560cc..6c74f77ff1f 100644
--- a/src/libsyntax_ext/trace_macros.rs
+++ b/src/libsyntax_ext/trace_macros.rs
@@ -17,10 +17,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
     }
 
     match (tt.len(), tt.first()) {
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => {
+        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::True) => {
             cx.set_trace_macros(true);
         }
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => {
+        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::False) => {
             cx.set_trace_macros(false);
         }
         _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs
index 4e080d115d2..5dd4d6566ed 100644
--- a/src/libsyntax_pos/symbol.rs
+++ b/src/libsyntax_pos/symbol.rs
@@ -921,10 +921,9 @@ pub struct Interner {
 
 impl Interner {
     fn prefill(init: &[&'static str]) -> Self {
-        let symbols = (0 .. init.len() as u32).map(Symbol::new);
         Interner {
-            strings: init.to_vec(),
-            names: init.iter().copied().zip(symbols).collect(),
+            strings: init.into(),
+            names: init.iter().copied().zip((0..).map(Symbol::new)).collect(),
             ..Default::default()
         }
     }
@@ -1019,6 +1018,21 @@ impl Symbol {
     pub fn is_doc_keyword(self) -> bool {
         self <= kw::Union
     }
+
+    /// A keyword or reserved identifier that can be used as a path segment.
+    pub fn is_path_segment_keyword(self) -> bool {
+        self == kw::Super ||
+        self == kw::SelfLower ||
+        self == kw::SelfUpper ||
+        self == kw::Crate ||
+        self == kw::PathRoot ||
+        self == kw::DollarCrate
+    }
+
+    /// This symbol can be a raw identifier.
+    pub fn can_be_raw(self) -> bool {
+        self != kw::Invalid && self != kw::Underscore && !self.is_path_segment_keyword()
+    }
 }
 
 impl Ident {
@@ -1049,24 +1063,13 @@ impl Ident {
 
     /// A keyword or reserved identifier that can be used as a path segment.
     pub fn is_path_segment_keyword(self) -> bool {
-        self.name == kw::Super ||
-        self.name == kw::SelfLower ||
-        self.name == kw::SelfUpper ||
-        self.name == kw::Crate ||
-        self.name == kw::PathRoot ||
-        self.name == kw::DollarCrate
-    }
-
-    /// This identifier can be a raw identifier.
-    pub fn can_be_raw(self) -> bool {
-        self.name != kw::Invalid && self.name != kw::Underscore &&
-        !self.is_path_segment_keyword()
+        self.name.is_path_segment_keyword()
     }
 
     /// We see this identifier in a normal identifier position, like variable name or a type.
     /// How was it written originally? Did it use the raw form? Let's try to guess.
     pub fn is_raw_guess(self) -> bool {
-        self.can_be_raw() && self.is_reserved()
+        self.name.can_be_raw() && self.is_reserved()
     }
 }
 
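`is_path_segment_keyword` and `can_be_raw` move from `Ident` to `Symbol` so that span-free callers such as `TokenKind`, which now stores only an `ast::Name`, can use them; `Ident` keeps `is_path_segment_keyword` as a forwarder. A stand-in sketch:

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Symbol(u32); // stand-in for syntax_pos::symbol::Symbol

    // Stand-ins for the `kw` constants referenced above.
    const KW_INVALID: Symbol = Symbol(0);
    const KW_UNDERSCORE: Symbol = Symbol(1);
    const KW_SUPER: Symbol = Symbol(2);

    impl Symbol {
        fn is_path_segment_keyword(self) -> bool {
            // The real list also covers kw::SelfLower, kw::SelfUpper,
            // kw::Crate, kw::PathRoot and kw::DollarCrate.
            self == KW_SUPER
        }

        fn can_be_raw(self) -> bool {
            self != KW_INVALID && self != KW_UNDERSCORE && !self.is_path_segment_keyword()
        }
    }

    struct Ident { name: Symbol }

    impl Ident {
        // Kept on Ident as a thin forwarder.
        fn is_path_segment_keyword(&self) -> bool {
            self.name.is_path_segment_keyword()
        }
    }

    fn main() {
        assert!(Symbol(42).can_be_raw());
        assert!(Ident { name: KW_SUPER }.is_path_segment_keyword());
        assert!(!KW_UNDERSCORE.can_be_raw());
    }
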
diff --git a/src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs b/src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs
index 216c81ca34c..4d9e0129e54 100644
--- a/src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs
@@ -1,3 +1,9 @@
+// WARNING WARNING WARNING WARNING WARNING
+// =======================================
+//
+// This code also appears in src/doc/unstable-book/src/language-features/plugin.md.
+// Please keep the two copies in sync!  FIXME: have rustdoc read this file
+
 // force-host
 
 #![crate_type="dylib"]
@@ -8,21 +14,15 @@ extern crate syntax_pos;
 extern crate rustc;
 extern crate rustc_plugin;
 
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::tokenstream::TokenTree;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
-use syntax::ext::build::AstBuilder;  // trait for expr_usize
+use syntax::ext::build::AstBuilder;  // A trait for expr_usize.
 use syntax_pos::Span;
 use rustc_plugin::Registry;
 
-// WARNING WARNING WARNING WARNING WARNING
-// =======================================
-//
-// This code also appears in src/doc/unstable-book/src/language-features/plugin.md.
-// Please keep the two copies in sync!  FIXME: have rustdoc read this file
-
 fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-        -> Box<MacResult + 'static> {
+        -> Box<dyn MacResult + 'static> {
 
     static NUMERALS: &'static [(&'static str, usize)] = &[
         ("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -38,7 +38,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
     }
 
     let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s, _)) => s.to_string(),
+        TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);