about summary refs log tree commit diff
path: root/src/libsyntax/parse
diff options
context:
space:
mode:
author    bors <bors@rust-lang.org>    2019-05-23 13:28:27 +0000
committer bors <bors@rust-lang.org>    2019-05-23 13:28:27 +0000
commit    27cc0db7a248308fc2634ac68d7608a20b4a1c09 (patch)
tree      fa99e0dafd4ffae806ccad46fd9f26348d1a17bc /src/libsyntax/parse
parent    f688ba608923bdbf6b46ec65af2f6464b6233a75 (diff)
parent    90d15e770419fb4ae8e120909baafc35ef243947 (diff)
download  rust-27cc0db7a248308fc2634ac68d7608a20b4a1c09.tar.gz
          rust-27cc0db7a248308fc2634ac68d7608a20b4a1c09.zip
Auto merge of #60965 - petrochenkov:lit3, r=matklad
syntax: Continue refactoring literals

A follow up to https://github.com/rust-lang/rust/pull/60679.

https://github.com/rust-lang/rust/commit/a2fd002bd5a91ba7997057724b72b9dac8fae550: Similarly to `EscapeError`, literal parsing now produces a `LitError`.
This way we can get rid of `diag: Option<(Span, &Handler)>` in interfaces while leaving attr/mod alone.

https://github.com/rust-lang/rust/commit/d9516d11208456d4a17fe68a34c1d0a00334e62c: Gathers all components of a literal token in a single struct.
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs  105
-rw-r--r--  src/libsyntax/parse/literal.rs    583
-rw-r--r--  src/libsyntax/parse/parser.rs      43
-rw-r--r--  src/libsyntax/parse/token.rs       98
4 files changed, 412 insertions, 417 deletions
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index c97d8040761..deb76d6d70a 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1,5 +1,6 @@
 use crate::ast::{self, Ident};
-use crate::parse::{token, ParseSess};
+use crate::parse::ParseSess;
+use crate::parse::token::{self, Token};
 use crate::symbol::Symbol;
 use crate::parse::unescape;
 use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
@@ -21,7 +22,7 @@ mod unicode_chars;
 
 #[derive(Clone, Debug)]
 pub struct TokenAndSpan {
-    pub tok: token::Token,
+    pub tok: Token,
     pub sp: Span,
 }
 
@@ -55,7 +56,7 @@ pub struct StringReader<'a> {
     /// Stop reading src at this index.
     crate end_src_index: usize,
     // cached:
-    peek_tok: token::Token,
+    peek_tok: Token,
     peek_span: Span,
     peek_span_src_raw: Span,
     fatal_errs: Vec<DiagnosticBuilder<'a>>,
@@ -726,7 +727,7 @@ impl<'a> StringReader<'a> {
     }
 
     /// Lex a LIT_INTEGER or a LIT_FLOAT
-    fn scan_number(&mut self, c: char) -> token::Lit {
+    fn scan_number(&mut self, c: char) -> (token::LitKind, Symbol) {
         let mut base = 10;
         let start_bpos = self.pos;
         self.bump();
@@ -753,7 +754,7 @@ impl<'a> StringReader<'a> {
                 }
                 _ => {
                     // just a 0
-                    return token::Integer(self.name_from(start_bpos));
+                    return (token::Integer, self.name_from(start_bpos));
                 }
             }
         } else if c.is_digit(10) {
@@ -765,7 +766,7 @@ impl<'a> StringReader<'a> {
         if num_digits == 0 {
             self.err_span_(start_bpos, self.pos, "no valid digits found for number");
 
-            return token::Integer(Symbol::intern("0"));
+            return (token::Integer, Symbol::intern("0"));
         }
 
         // might be a float, but don't be greedy if this is actually an
@@ -783,17 +784,17 @@ impl<'a> StringReader<'a> {
             let pos = self.pos;
             self.check_float_base(start_bpos, pos, base);
 
-            token::Float(self.name_from(start_bpos))
+            (token::Float, self.name_from(start_bpos))
         } else {
             // it might be a float if it has an exponent
             if self.ch_is('e') || self.ch_is('E') {
                 self.scan_float_exponent();
                 let pos = self.pos;
                 self.check_float_base(start_bpos, pos, base);
-                return token::Float(self.name_from(start_bpos));
+                return (token::Float, self.name_from(start_bpos));
             }
             // but we certainly have an integer!
-            token::Integer(self.name_from(start_bpos))
+            (token::Integer, self.name_from(start_bpos))
         }
     }
 
@@ -846,7 +847,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    fn binop(&mut self, op: token::BinOpToken) -> token::Token {
+    fn binop(&mut self, op: token::BinOpToken) -> Token {
         self.bump();
         if self.ch_is('=') {
             self.bump();
@@ -858,7 +859,7 @@ impl<'a> StringReader<'a> {
 
     /// Returns the next token from the string, advances the input past that
     /// token, and updates the interner
-    fn next_token_inner(&mut self) -> Result<token::Token, ()> {
+    fn next_token_inner(&mut self) -> Result<Token, ()> {
         let c = self.ch;
 
         if ident_start(c) {
@@ -912,10 +913,10 @@ impl<'a> StringReader<'a> {
         }
 
         if is_dec_digit(c) {
-            let num = self.scan_number(c.unwrap());
+            let (kind, symbol) = self.scan_number(c.unwrap());
             let suffix = self.scan_optional_raw_name();
-            debug!("next_token_inner: scanned number {:?}, {:?}", num, suffix);
-            return Ok(token::Literal(num, suffix));
+            debug!("next_token_inner: scanned number {:?}, {:?}, {:?}", kind, symbol, suffix);
+            return Ok(Token::lit(kind, symbol, suffix));
         }
 
         match c.expect("next_token_inner called at EOF") {
@@ -1073,10 +1074,10 @@ impl<'a> StringReader<'a> {
                     // lifetimes shouldn't end with a single quote
                     // if we find one, then this is an invalid character literal
                     if self.ch_is('\'') {
-                        let id = self.name_from(start);
+                        let symbol = self.name_from(start);
                         self.bump();
                         self.validate_char_escape(start_with_quote);
-                        return Ok(token::Literal(token::Char(id), None))
+                        return Ok(Token::lit(token::Char, symbol, None));
                     }
 
                     // Include the leading `'` in the real identifier, for macro
@@ -1098,43 +1099,43 @@ impl<'a> StringReader<'a> {
                     return Ok(token::Lifetime(ident));
                 }
                 let msg = "unterminated character literal";
-                let id = self.scan_single_quoted_string(start_with_quote, msg);
+                let symbol = self.scan_single_quoted_string(start_with_quote, msg);
                 self.validate_char_escape(start_with_quote);
                 let suffix = self.scan_optional_raw_name();
-                Ok(token::Literal(token::Char(id), suffix))
+                Ok(Token::lit(token::Char, symbol, suffix))
             }
             'b' => {
                 self.bump();
-                let lit = match self.ch {
+                let (kind, symbol) = match self.ch {
                     Some('\'') => {
                         let start_with_quote = self.pos;
                         self.bump();
                         let msg = "unterminated byte constant";
-                        let id = self.scan_single_quoted_string(start_with_quote, msg);
+                        let symbol = self.scan_single_quoted_string(start_with_quote, msg);
                         self.validate_byte_escape(start_with_quote);
-                        token::Byte(id)
+                        (token::Byte, symbol)
                     },
                     Some('"') => {
                         let start_with_quote = self.pos;
                         let msg = "unterminated double quote byte string";
-                        let id = self.scan_double_quoted_string(msg);
+                        let symbol = self.scan_double_quoted_string(msg);
                         self.validate_byte_str_escape(start_with_quote);
-                        token::ByteStr(id)
+                        (token::ByteStr, symbol)
                     },
                     Some('r') => self.scan_raw_byte_string(),
                     _ => unreachable!(),  // Should have been a token::Ident above.
                 };
                 let suffix = self.scan_optional_raw_name();
 
-                Ok(token::Literal(lit, suffix))
+                Ok(Token::lit(kind, symbol, suffix))
             }
             '"' => {
                 let start_with_quote = self.pos;
                 let msg = "unterminated double quote string";
-                let id = self.scan_double_quoted_string(msg);
+                let symbol = self.scan_double_quoted_string(msg);
                 self.validate_str_escape(start_with_quote);
                 let suffix = self.scan_optional_raw_name();
-                Ok(token::Literal(token::Str_(id), suffix))
+                Ok(Token::lit(token::Str, symbol, suffix))
             }
             'r' => {
                 let start_bpos = self.pos;
@@ -1205,14 +1206,14 @@ impl<'a> StringReader<'a> {
                 }
 
                 self.bump();
-                let id = if valid {
+                let symbol = if valid {
                     self.name_from_to(content_start_bpos, content_end_bpos)
                 } else {
                     Symbol::intern("??")
                 };
                 let suffix = self.scan_optional_raw_name();
 
-                Ok(token::Literal(token::StrRaw(id, hash_count), suffix))
+                Ok(Token::lit(token::StrRaw(hash_count), symbol, suffix))
             }
             '-' => {
                 if self.nextch_is('>') {
@@ -1366,7 +1367,7 @@ impl<'a> StringReader<'a> {
         id
     }
 
-    fn scan_raw_byte_string(&mut self) -> token::Lit {
+    fn scan_raw_byte_string(&mut self) -> (token::LitKind, Symbol) {
         let start_bpos = self.pos;
         self.bump();
         let mut hash_count = 0;
@@ -1423,7 +1424,7 @@ impl<'a> StringReader<'a> {
 
         self.bump();
 
-        token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos), hash_count)
+        (token::ByteStrRaw(hash_count), self.name_from_to(content_start_bpos, content_end_bpos))
     }
 
     fn validate_char_escape(&self, start_with_quote: BytePos) {
@@ -1637,15 +1638,19 @@ mod tests {
 
     // check that the given reader produces the desired stream
     // of tokens (stop checking after exhausting the expected vec)
-    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<token::Token>) {
+    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<Token>) {
         for expected_tok in &expected {
             assert_eq!(&string_reader.next_token().tok, expected_tok);
         }
     }
 
     // make the identifier by looking up the string in the interner
-    fn mk_ident(id: &str) -> token::Token {
-        token::Token::from_ast_ident(Ident::from_str(id))
+    fn mk_ident(id: &str) -> Token {
+        Token::from_ast_ident(Ident::from_str(id))
+    }
+
+    fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> Token {
+        Token::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
     }
 
     #[test]
@@ -1694,7 +1699,7 @@ mod tests {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok,
-                    token::Literal(token::Char(Symbol::intern("a")), None));
+                       mk_lit(token::Char, "a", None));
         })
     }
 
@@ -1704,7 +1709,7 @@ mod tests {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok,
-                    token::Literal(token::Char(Symbol::intern(" ")), None));
+                       mk_lit(token::Char, " ", None));
         })
     }
 
@@ -1714,7 +1719,7 @@ mod tests {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok,
-                    token::Literal(token::Char(Symbol::intern("\\n")), None));
+                       mk_lit(token::Char, "\\n", None));
         })
     }
 
@@ -1724,7 +1729,7 @@ mod tests {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok,
-                    token::Lifetime(Ident::from_str("'abc")));
+                       token::Lifetime(Ident::from_str("'abc")));
         })
     }
 
@@ -1733,10 +1738,8 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
-                        .next_token()
-                        .tok,
-                    token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
+            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().tok,
+                       mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
         })
     }
 
@@ -1748,18 +1751,16 @@ mod tests {
             macro_rules! test {
                 ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
                     assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok,
-                            token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
-                                            Some(Symbol::intern("suffix"))));
+                               mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
                     // with a whitespace separator:
                     assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok,
-                            token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
-                                            None));
+                               mk_lit(token::$tok_type, $tok_contents, None));
                 }}
             }
 
             test!("'a'", Char, "a");
             test!("b'a'", Byte, "a");
-            test!("\"a\"", Str_, "a");
+            test!("\"a\"", Str, "a");
             test!("b\"a\"", ByteStr, "a");
             test!("1234", Integer, "1234");
             test!("0b101", Integer, "0b101");
@@ -1768,14 +1769,11 @@ mod tests {
             test!("1.0e10", Float, "1.0e10");
 
             assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok,
-                    token::Literal(token::Integer(Symbol::intern("2")),
-                                    Some(Symbol::intern("us"))));
+                       mk_lit(token::Integer, "2", Some("us")));
             assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
-                    token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
-                                    Some(Symbol::intern("suffix"))));
+                       mk_lit(token::StrRaw(3), "raw", Some("suffix")));
             assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
-                    token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
-                                    Some(Symbol::intern("suffix"))));
+                       mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
         })
     }
 
@@ -1796,8 +1794,7 @@ mod tests {
                 token::Comment => {}
                 _ => panic!("expected a comment!"),
             }
-            assert_eq!(lexer.next_token().tok,
-                    token::Literal(token::Char(Symbol::intern("a")), None));
+            assert_eq!(lexer.next_token().tok, mk_lit(token::Char, "a", None));
         })
     }
 
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index f277f0522b8..0305b1f59b9 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -6,7 +6,7 @@ use crate::parse::PResult;
 use crate::parse::token::{self, Token};
 use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
 use crate::print::pprust;
-use crate::symbol::{kw, Symbol};
+use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{TokenStream, TokenTree};
 
 use errors::{Applicability, Handler};
@@ -16,132 +16,180 @@ use syntax_pos::Span;
 
 use std::ascii;
 
-macro_rules! err {
-    ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
-        match $opt_diag {
-            Some(($span, $diag)) => { $($body)* }
-            None => return None,
+crate enum LitError {
+    NotLiteral,
+    LexerError,
+    InvalidSuffix,
+    InvalidIntSuffix,
+    InvalidFloatSuffix,
+    NonDecimalFloat(u32),
+    IntTooLarge,
+}
+
+impl LitError {
+    fn report(&self, diag: &Handler, lit: token::Lit, span: Span) {
+        let token::Lit { kind, suffix, .. } = lit;
+        match *self {
+            // `NotLiteral` is not an error by itself, so we don't report
+            // it and give the parser opportunity to try something else.
+            LitError::NotLiteral => {}
+            // `LexerError` *is* an error, but it was already reported
+            // by lexer, so here we don't report it the second time.
+            LitError::LexerError => {}
+            LitError::InvalidSuffix => {
+                expect_no_suffix(
+                    diag, span, &format!("{} {} literal", kind.article(), kind.descr()), suffix
+                );
+            }
+            LitError::InvalidIntSuffix => {
+                let suf = suffix.expect("suffix error with no suffix").as_str();
+                if looks_like_width_suffix(&['i', 'u'], &suf) {
+                    // If it looks like a width, try to be helpful.
+                    let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
+                    diag.struct_span_err(span, &msg)
+                        .help("valid widths are 8, 16, 32, 64 and 128")
+                        .emit();
+                } else {
+                    let msg = format!("invalid suffix `{}` for integer literal", suf);
+                    diag.struct_span_err(span, &msg)
+                        .span_label(span, format!("invalid suffix `{}`", suf))
+                        .help("the suffix must be one of the integral types (`u32`, `isize`, etc)")
+                        .emit();
+                }
+            }
+            LitError::InvalidFloatSuffix => {
+                let suf = suffix.expect("suffix error with no suffix").as_str();
+                if looks_like_width_suffix(&['f'], &suf) {
+                    // If it looks like a width, try to be helpful.
+                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
+                    diag.struct_span_err(span, &msg)
+                        .help("valid widths are 32 and 64")
+                        .emit();
+                } else {
+                    let msg = format!("invalid suffix `{}` for float literal", suf);
+                    diag.struct_span_err(span, &msg)
+                        .span_label(span, format!("invalid suffix `{}`", suf))
+                        .help("valid suffixes are `f32` and `f64`")
+                        .emit();
+                }
+            }
+            LitError::NonDecimalFloat(base) => {
+                let descr = match base {
+                    16 => "hexadecimal",
+                    8 => "octal",
+                    2 => "binary",
+                    _ => unreachable!(),
+                };
+                diag.struct_span_err(span, &format!("{} float literal is not supported", descr))
+                    .span_label(span, "not supported")
+                    .emit();
+            }
+            LitError::IntTooLarge => {
+                diag.struct_span_err(span, "integer literal is too large")
+                    .emit();
+            }
         }
     }
 }
 
 impl LitKind {
-    /// Converts literal token with a suffix into a semantic literal.
-    /// Works speculatively and may return `None` if diagnostic handler is not passed.
-    /// If diagnostic handler is passed, always returns `Some`,
-    /// possibly after reporting non-fatal errors and recovery.
-    fn from_lit_token(
-        lit: token::Lit,
-        suf: Option<Symbol>,
-        diag: Option<(Span, &Handler)>
-    ) -> Option<LitKind> {
-        if suf.is_some() && !lit.may_have_suffix() {
-            err!(diag, |span, diag| {
-                expect_no_suffix(span, diag, &format!("a {}", lit.literal_name()), suf)
-            });
+    /// Converts literal token into a semantic literal.
+    fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {
+        let token::Lit { kind, symbol, suffix } = lit;
+        if suffix.is_some() && !kind.may_have_suffix() {
+            return Err(LitError::InvalidSuffix);
         }
 
-        Some(match lit {
-            token::Bool(i) => {
-                assert!(i == kw::True || i == kw::False);
-                LitKind::Bool(i == kw::True)
+        Ok(match kind {
+            token::Bool => {
+                assert!(symbol == kw::True || symbol == kw::False);
+                LitKind::Bool(symbol == kw::True)
             }
-            token::Byte(i) => {
-                match unescape_byte(&i.as_str()) {
-                    Ok(c) => LitKind::Byte(c),
-                    Err(_) => LitKind::Err(i),
-                }
-            },
-            token::Char(i) => {
-                match unescape_char(&i.as_str()) {
-                    Ok(c) => LitKind::Char(c),
-                    Err(_) => LitKind::Err(i),
-                }
-            },
-            token::Err(i) => LitKind::Err(i),
+            token::Byte => return unescape_byte(&symbol.as_str())
+                .map(LitKind::Byte).map_err(|_| LitError::LexerError),
+            token::Char => return unescape_char(&symbol.as_str())
+                .map(LitKind::Char).map_err(|_| LitError::LexerError),
 
             // There are some valid suffixes for integer and float literals,
             // so all the handling is done internally.
-            token::Integer(s) => return integer_lit(&s.as_str(), suf, diag),
-            token::Float(s) => return float_lit(&s.as_str(), suf, diag),
+            token::Integer => return integer_lit(symbol, suffix),
+            token::Float => return float_lit(symbol, suffix),
 
-            token::Str_(mut sym) => {
+            token::Str => {
                 // If there are no characters requiring special treatment we can
-                // reuse the symbol from the Token. Otherwise, we must generate a
+                // reuse the symbol from the token. Otherwise, we must generate a
                 // new symbol because the string in the LitKind is different to the
-                // string in the Token.
-                let mut has_error = false;
-                let s = &sym.as_str();
-                if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
+                // string in the token.
+                let s = symbol.as_str();
+                let symbol = if s.contains(&['\\', '\r'][..]) {
                     let mut buf = String::with_capacity(s.len());
-                    unescape_str(s, &mut |_, unescaped_char| {
+                    let mut error = Ok(());
+                    unescape_str(&s, &mut |_, unescaped_char| {
                         match unescaped_char {
                             Ok(c) => buf.push(c),
-                            Err(_) => has_error = true,
+                            Err(_) => error = Err(LitError::LexerError),
                         }
                     });
-                    if has_error {
-                        return Some(LitKind::Err(sym));
-                    }
-                    sym = Symbol::intern(&buf)
-                }
-
-                LitKind::Str(sym, ast::StrStyle::Cooked)
+                    error?;
+                    Symbol::intern(&buf)
+                } else {
+                    symbol
+                };
+                LitKind::Str(symbol, ast::StrStyle::Cooked)
             }
-            token::StrRaw(mut sym, n) => {
+            token::StrRaw(n) => {
                 // Ditto.
-                let s = &sym.as_str();
-                if s.contains('\r') {
-                    sym = Symbol::intern(&raw_str_lit(s));
-                }
-                LitKind::Str(sym, ast::StrStyle::Raw(n))
+                let s = symbol.as_str();
+                let symbol = if s.contains('\r') {
+                    Symbol::intern(&raw_str_lit(&s))
+                } else {
+                    symbol
+                };
+                LitKind::Str(symbol, ast::StrStyle::Raw(n))
             }
-            token::ByteStr(i) => {
-                let s = &i.as_str();
+            token::ByteStr => {
+                let s = symbol.as_str();
                 let mut buf = Vec::with_capacity(s.len());
-                let mut has_error = false;
-                unescape_byte_str(s, &mut |_, unescaped_byte| {
+                let mut error = Ok(());
+                unescape_byte_str(&s, &mut |_, unescaped_byte| {
                     match unescaped_byte {
                         Ok(c) => buf.push(c),
-                        Err(_) => has_error = true,
+                        Err(_) => error = Err(LitError::LexerError),
                     }
                 });
-                if has_error {
-                    return Some(LitKind::Err(i));
-                }
+                error?;
                 buf.shrink_to_fit();
                 LitKind::ByteStr(Lrc::new(buf))
             }
-            token::ByteStrRaw(i, _) => {
-                LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))
-            }
+            token::ByteStrRaw(_) => LitKind::ByteStr(Lrc::new(symbol.to_string().into_bytes())),
+            token::Err => LitKind::Err(symbol),
         })
     }
 
     /// Attempts to recover a token from semantic literal.
     /// This function is used when the original token doesn't exist (e.g. the literal is created
     /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
-    pub fn to_lit_token(&self) -> (token::Lit, Option<Symbol>) {
-        match *self {
+    pub fn to_lit_token(&self) -> token::Lit {
+        let (kind, symbol, suffix) = match *self {
             LitKind::Str(string, ast::StrStyle::Cooked) => {
                 let escaped = string.as_str().escape_default().to_string();
-                (token::Lit::Str_(Symbol::intern(&escaped)), None)
+                (token::Str, Symbol::intern(&escaped), None)
             }
             LitKind::Str(string, ast::StrStyle::Raw(n)) => {
-                (token::Lit::StrRaw(string, n), None)
+                (token::StrRaw(n), string, None)
             }
             LitKind::ByteStr(ref bytes) => {
                 let string = bytes.iter().cloned().flat_map(ascii::escape_default)
                     .map(Into::<char>::into).collect::<String>();
-                (token::Lit::ByteStr(Symbol::intern(&string)), None)
+                (token::ByteStr, Symbol::intern(&string), None)
             }
             LitKind::Byte(byte) => {
                 let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
-                (token::Lit::Byte(Symbol::intern(&string)), None)
+                (token::Byte, Symbol::intern(&string), None)
             }
             LitKind::Char(ch) => {
                 let string: String = ch.escape_default().map(Into::<char>::into).collect();
-                (token::Lit::Char(Symbol::intern(&string)), None)
+                (token::Char, Symbol::intern(&string), None)
             }
             LitKind::Int(n, ty) => {
                 let suffix = match ty {
@@ -149,64 +197,66 @@ impl LitKind {
                     ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
                     ast::LitIntType::Unsuffixed => None,
                 };
-                (token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
+                (token::Integer, Symbol::intern(&n.to_string()), suffix)
             }
             LitKind::Float(symbol, ty) => {
-                (token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
+                (token::Float, symbol, Some(Symbol::intern(ty.ty_to_string())))
+            }
+            LitKind::FloatUnsuffixed(symbol) => {
+                (token::Float, symbol, None)
             }
-            LitKind::FloatUnsuffixed(symbol) => (token::Lit::Float(symbol), None),
             LitKind::Bool(value) => {
-                let kw = if value { kw::True } else { kw::False };
-                (token::Lit::Bool(kw), None)
+                let symbol = if value { kw::True } else { kw::False };
+                (token::Bool, symbol, None)
             }
-            LitKind::Err(val) => (token::Lit::Err(val), None),
-        }
+            LitKind::Err(symbol) => {
+                (token::Err, symbol, None)
+            }
+        };
+
+        token::Lit::new(kind, symbol, suffix)
     }
 }
 
 impl Lit {
-    /// Converts literal token with a suffix into an AST literal.
-    /// Works speculatively and may return `None` if diagnostic handler is not passed.
-    /// If diagnostic handler is passed, may return `Some`,
-    /// possibly after reporting non-fatal errors and recovery, or `None` for irrecoverable errors.
-    crate fn from_token(
-        token: &token::Token,
-        span: Span,
-        diag: Option<(Span, &Handler)>,
-    ) -> Option<Lit> {
-        let (token, suffix) = match *token {
+    /// Converts literal token into an AST literal.
+    fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
+        Ok(Lit { token, node: LitKind::from_lit_token(token)?, span })
+    }
+
+    /// Converts arbitrary token into an AST literal.
+    crate fn from_token(token: &Token, span: Span) -> Result<Lit, LitError> {
+        let lit = match *token {
             token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False =>
-                (token::Bool(ident.name), None),
-            token::Literal(token, suffix) =>
-                (token, suffix),
+                token::Lit::new(token::Bool, ident.name, None),
+            token::Literal(lit) =>
+                lit,
             token::Interpolated(ref nt) => {
                 if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
                     if let ast::ExprKind::Lit(lit) = &expr.node {
-                        return Some(lit.clone());
+                        return Ok(lit.clone());
                     }
                 }
-                return None;
+                return Err(LitError::NotLiteral);
             }
-            _ => return None,
+            _ => return Err(LitError::NotLiteral)
         };
 
-        let node = LitKind::from_lit_token(token, suffix, diag)?;
-        Some(Lit { node, token, suffix, span })
+        Lit::from_lit_token(lit, span)
     }
 
     /// Attempts to recover an AST literal from semantic literal.
     /// This function is used when the original token doesn't exist (e.g. the literal is created
     /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
     pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
-        let (token, suffix) = node.to_lit_token();
-        Lit { node, token, suffix, span }
+        Lit { token: node.to_lit_token(), node, span }
     }
 
     /// Losslessly convert an AST literal into a token stream.
     crate fn tokens(&self) -> TokenStream {
-        let token = match self.token {
-            token::Bool(symbol) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
-            token => Token::Literal(token, self.suffix),
+        let token = match self.token.kind {
+            token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
+            _ => token::Literal(self.token),
         };
         TokenTree::Token(self.span, token).into()
     }
@@ -215,24 +265,22 @@ impl Lit {
 impl<'a> Parser<'a> {
     /// Matches `lit = true | false | token_lit`.
     crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
-        let diag = Some((self.span, &self.sess.span_diagnostic));
-        if let Some(lit) = Lit::from_token(&self.token, self.span, diag) {
-            self.bump();
-            return Ok(lit);
-        } else if self.token == token::Dot {
-            // Recover `.4` as `0.4`.
-            let recovered = self.look_ahead(1, |t| {
-                if let token::Literal(token::Integer(val), suf) = *t {
+        let mut recovered = None;
+        if self.token == token::Dot {
+            // Attempt to recover `.4` as `0.4`.
+            recovered = self.look_ahead(1, |t| {
+                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = *t {
                     let next_span = self.look_ahead_span(1);
                     if self.span.hi() == next_span.lo() {
-                        let sym = String::from("0.") + &val.as_str();
-                        let token = token::Literal(token::Float(Symbol::intern(&sym)), suf);
+                        let s = String::from("0.") + &symbol.as_str();
+                        let token = Token::lit(token::Float, Symbol::intern(&s), suffix);
                         return Some((token, self.span.to(next_span)));
                     }
                 }
                 None
             });
-            if let Some((token, span)) = recovered {
+            if let Some((ref token, span)) = recovered {
+                self.bump();
                 self.diagnostic()
                     .struct_span_err(span, "float literals must have an integer part")
                     .span_suggestion(
@@ -242,63 +290,68 @@ impl<'a> Parser<'a> {
                         Applicability::MachineApplicable,
                     )
                     .emit();
-                let diag = Some((span, &self.sess.span_diagnostic));
-                if let Some(lit) = Lit::from_token(&token, span, diag) {
-                    self.bump();
-                    self.bump();
-                    return Ok(lit);
-                }
             }
         }
 
-        Err(self.span_fatal(self.span, &format!("unexpected token: {}", self.this_token_descr())))
-    }
-}
+        let (token, span) = recovered.as_ref().map_or((&self.token, self.span),
+                                                      |(token, span)| (token, *span));
 
-crate fn expect_no_suffix(sp: Span, diag: &Handler, kind: &str, suffix: Option<ast::Name>) {
-    match suffix {
-        None => {/* everything ok */}
-        Some(suf) => {
-            let text = suf.as_str();
-            if text.is_empty() {
-                diag.span_bug(sp, "found empty literal suffix in Some")
+        match Lit::from_token(token, span) {
+            Ok(lit) => {
+                self.bump();
+                Ok(lit)
+            }
+            Err(LitError::NotLiteral) => {
+                let msg = format!("unexpected token: {}", self.this_token_descr());
+                Err(self.span_fatal(span, &msg))
+            }
+            Err(err) => {
+                let lit = token.expect_lit();
+                self.bump();
+                err.report(&self.sess.span_diagnostic, lit, span);
+                let lit = token::Lit::new(token::Err, lit.symbol, lit.suffix);
+                Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
             }
-            let mut err = if kind == "a tuple index" &&
-                ["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str())
-            {
-                // #59553: warn instead of reject out of hand to allow the fix to percolate
-                // through the ecosystem when people fix their macros
-                let mut err = diag.struct_span_warn(
-                    sp,
-                    &format!("suffixes on {} are invalid", kind),
-                );
-                err.note(&format!(
-                    "`{}` is *temporarily* accepted on tuple index fields as it was \
-                        incorrectly accepted on stable for a few releases",
-                    text,
-                ));
-                err.help(
-                    "on proc macros, you'll want to use `syn::Index::from` or \
-                        `proc_macro::Literal::*_unsuffixed` for code that will desugar \
-                        to tuple field access",
-                );
-                err.note(
-                    "for more context, see https://github.com/rust-lang/rust/issues/60210",
-                );
-                err
-            } else {
-                diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
-            };
-            err.span_label(sp, format!("invalid suffix `{}`", text));
-            err.emit();
         }
     }
 }
 
+crate fn expect_no_suffix(diag: &Handler, sp: Span, kind: &str, suffix: Option<Symbol>) {
+    if let Some(suf) = suffix {
+        let mut err = if kind == "a tuple index" &&
+                         [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) {
+            // #59553: warn instead of reject out of hand to allow the fix to percolate
+            // through the ecosystem when people fix their macros
+            let mut err = diag.struct_span_warn(
+                sp,
+                &format!("suffixes on {} are invalid", kind),
+            );
+            err.note(&format!(
+                "`{}` is *temporarily* accepted on tuple index fields as it was \
+                    incorrectly accepted on stable for a few releases",
+                suf,
+            ));
+            err.help(
+                "on proc macros, you'll want to use `syn::Index::from` or \
+                    `proc_macro::Literal::*_unsuffixed` for code that will desugar \
+                    to tuple field access",
+            );
+            err.note(
+                "for more context, see https://github.com/rust-lang/rust/issues/60210",
+            );
+            err
+        } else {
+            diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
+        };
+        err.span_label(sp, format!("invalid suffix `{}`", suf));
+        err.emit();
+    }
+}
+
 /// Parses a string representing a raw string literal into its final form. The
 /// only operation this does is convert embedded CRLF into a single LF.
 fn raw_str_lit(lit: &str) -> String {
-    debug!("raw_str_lit: given {}", lit.escape_default());
+    debug!("raw_str_lit: {:?}", lit);
     let mut res = String::with_capacity(lit.len());
 
     let mut chars = lit.chars().peekable();
@@ -318,169 +371,87 @@ fn raw_str_lit(lit: &str) -> String {
     res
 }
 
-// check if `s` looks like i32 or u1234 etc.
+// Checks if `s` looks like i32 or u1234 etc.
 fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
-    s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
+    s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
 }
 
-fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                      -> Option<LitKind> {
-    debug!("filtered_float_lit: {}, {:?}", data, suffix);
-    let suffix = match suffix {
-        Some(suffix) => suffix,
-        None => return Some(LitKind::FloatUnsuffixed(data)),
-    };
-
-    Some(match &*suffix.as_str() {
-        "f32" => LitKind::Float(data, ast::FloatTy::F32),
-        "f64" => LitKind::Float(data, ast::FloatTy::F64),
-        suf => {
-            err!(diag, |span, diag| {
-                if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
-                    // if it looks like a width, lets try to be helpful.
-                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
-                    diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
-                } else {
-                    let msg = format!("invalid suffix `{}` for float literal", suf);
-                    diag.struct_span_err(span, &msg)
-                        .span_label(span, format!("invalid suffix `{}`", suf))
-                        .help("valid suffixes are `f32` and `f64`")
-                        .emit();
-                }
-            });
+fn strip_underscores(symbol: Symbol) -> Symbol {
+    // Do not allocate a new string unless necessary.
+    let s = symbol.as_str();
+    if s.contains('_') {
+        let mut s = s.to_string();
+        s.retain(|c| c != '_');
+        return Symbol::intern(&s);
+    }
+    symbol
+}
 
-            LitKind::FloatUnsuffixed(data)
+fn filtered_float_lit(symbol: Symbol, suffix: Option<Symbol>, base: u32)
+                      -> Result<LitKind, LitError> {
+    debug!("filtered_float_lit: {:?}, {:?}, {:?}", symbol, suffix, base);
+    if base != 10 {
+        return Err(LitError::NonDecimalFloat(base));
+    }
+    Ok(match suffix {
+        Some(suf) => match suf {
+            sym::f32 => LitKind::Float(symbol, ast::FloatTy::F32),
+            sym::f64 => LitKind::Float(symbol, ast::FloatTy::F64),
+            _ => return Err(LitError::InvalidFloatSuffix),
         }
+        None => LitKind::FloatUnsuffixed(symbol)
     })
 }
-fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                 -> Option<LitKind> {
-    debug!("float_lit: {:?}, {:?}", s, suffix);
-    // FIXME #2252: bounds checking float literals is deferred until trans
-
-    // Strip underscores without allocating a new String unless necessary.
-    let s2;
-    let s = if s.chars().any(|c| c == '_') {
-        s2 = s.chars().filter(|&c| c != '_').collect::<String>();
-        &s2
-    } else {
-        s
-    };
 
-    filtered_float_lit(Symbol::intern(s), suffix, diag)
+fn float_lit(symbol: Symbol, suffix: Option<Symbol>) -> Result<LitKind, LitError> {
+    debug!("float_lit: {:?}, {:?}", symbol, suffix);
+    filtered_float_lit(strip_underscores(symbol), suffix, 10)
 }
 
-fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                   -> Option<LitKind> {
-    // s can only be ascii, byte indexing is fine
-
-    // Strip underscores without allocating a new String unless necessary.
-    let s2;
-    let mut s = if s.chars().any(|c| c == '_') {
-        s2 = s.chars().filter(|&c| c != '_').collect::<String>();
-        &s2
-    } else {
-        s
-    };
-
-    debug!("integer_lit: {}, {:?}", s, suffix);
+fn integer_lit(symbol: Symbol, suffix: Option<Symbol>) -> Result<LitKind, LitError> {
+    debug!("integer_lit: {:?}, {:?}", symbol, suffix);
+    let symbol = strip_underscores(symbol);
+    let s = symbol.as_str();
 
     let mut base = 10;
-    let orig = s;
-    let mut ty = ast::LitIntType::Unsuffixed;
-
-    if s.starts_with('0') && s.len() > 1 {
+    if s.len() > 1 && s.as_bytes()[0] == b'0' {
         match s.as_bytes()[1] {
             b'x' => base = 16,
             b'o' => base = 8,
             b'b' => base = 2,
-            _ => { }
+            _ => {}
         }
     }
 
-    // 1f64 and 2f32 etc. are valid float literals.
-    if let Some(suf) = suffix {
-        if looks_like_width_suffix(&['f'], &suf.as_str()) {
-            let err = match base {
-                16 => Some("hexadecimal float literal is not supported"),
-                8 => Some("octal float literal is not supported"),
-                2 => Some("binary float literal is not supported"),
-                _ => None,
-            };
-            if let Some(err) = err {
-                err!(diag, |span, diag| {
-                    diag.struct_span_err(span, err)
-                        .span_label(span, "not supported")
-                        .emit();
-                });
-            }
-            return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
-        }
-    }
-
-    if base != 10 {
-        s = &s[2..];
-    }
-
-    if let Some(suf) = suffix {
-        if suf.as_str().is_empty() {
-            err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
-        }
-        ty = match &*suf.as_str() {
-            "isize" => ast::LitIntType::Signed(ast::IntTy::Isize),
-            "i8"  => ast::LitIntType::Signed(ast::IntTy::I8),
-            "i16" => ast::LitIntType::Signed(ast::IntTy::I16),
-            "i32" => ast::LitIntType::Signed(ast::IntTy::I32),
-            "i64" => ast::LitIntType::Signed(ast::IntTy::I64),
-            "i128" => ast::LitIntType::Signed(ast::IntTy::I128),
-            "usize" => ast::LitIntType::Unsigned(ast::UintTy::Usize),
-            "u8"  => ast::LitIntType::Unsigned(ast::UintTy::U8),
-            "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
-            "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
-            "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
-            "u128" => ast::LitIntType::Unsigned(ast::UintTy::U128),
-            suf => {
-                // i<digits> and u<digits> look like widths, so lets
-                // give an error message along those lines
-                err!(diag, |span, diag| {
-                    if looks_like_width_suffix(&['i', 'u'], suf) {
-                        let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
-                        diag.struct_span_err(span, &msg)
-                            .help("valid widths are 8, 16, 32, 64 and 128")
-                            .emit();
-                    } else {
-                        let msg = format!("invalid suffix `{}` for numeric literal", suf);
-                        diag.struct_span_err(span, &msg)
-                            .span_label(span, format!("invalid suffix `{}`", suf))
-                            .help("the suffix must be one of the integral types \
-                                   (`u32`, `isize`, etc)")
-                            .emit();
-                    }
-                });
-
-                ty
-            }
+    let ty = match suffix {
+        Some(suf) => match suf {
+            sym::isize => ast::LitIntType::Signed(ast::IntTy::Isize),
+            sym::i8  => ast::LitIntType::Signed(ast::IntTy::I8),
+            sym::i16 => ast::LitIntType::Signed(ast::IntTy::I16),
+            sym::i32 => ast::LitIntType::Signed(ast::IntTy::I32),
+            sym::i64 => ast::LitIntType::Signed(ast::IntTy::I64),
+            sym::i128 => ast::LitIntType::Signed(ast::IntTy::I128),
+            sym::usize => ast::LitIntType::Unsigned(ast::UintTy::Usize),
+            sym::u8  => ast::LitIntType::Unsigned(ast::UintTy::U8),
+            sym::u16 => ast::LitIntType::Unsigned(ast::UintTy::U16),
+            sym::u32 => ast::LitIntType::Unsigned(ast::UintTy::U32),
+            sym::u64 => ast::LitIntType::Unsigned(ast::UintTy::U64),
+            sym::u128 => ast::LitIntType::Unsigned(ast::UintTy::U128),
+            // `1f64` and `2f32` etc. are valid float literals, and
+            // `fxxx` looks more like an invalid float literal than an invalid integer literal.
+            _ if suf.as_str().starts_with('f') => return filtered_float_lit(symbol, suffix, base),
+            _ => return Err(LitError::InvalidIntSuffix),
         }
-    }
-
-    debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
-           string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);
-
-    Some(match u128::from_str_radix(s, base) {
-        Ok(r) => LitKind::Int(r, ty),
-        Err(_) => {
-            // small bases are lexed as if they were base 10, e.g, the string
-            // might be `0b10201`. This will cause the conversion above to fail,
-            // but these cases have errors in the lexer: we don't want to emit
-            // two errors, and we especially don't want to emit this error since
-            // it isn't necessarily true.
-            let already_errored = base < 10 &&
-                s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
+        _ => ast::LitIntType::Unsuffixed
+    };
 
-            if !already_errored {
-                err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
-            }
-            LitKind::Int(0, ty)
-        }
+    let s = &s[if base != 10 { 2 } else { 0 } ..];
+    u128::from_str_radix(s, base).map(|i| LitKind::Int(i, ty)).map_err(|_| {
+        // Small bases are lexed as if they were base 10, e.g., the string
+        // might be `0b10201`. This will cause the conversion above to fail,
+        // but these kinds of errors are already reported by the lexer.
+        let from_lexer =
+            base < 10 && s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
+        if from_lexer { LitError::LexerError } else { LitError::IntTooLarge }
     })
 }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 7600d6078a1..ae3665c834b 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -352,10 +352,12 @@ impl TokenCursor {
         let body = TokenTree::Delimited(
             delim_span,
             token::Bracket,
-            [TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
-             TokenTree::Token(sp, token::Eq),
-             TokenTree::Token(sp, token::Literal(
-                token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
+            [
+                TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+                TokenTree::Token(sp, token::Eq),
+                TokenTree::Token(sp, token::Token::lit(
+                    token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
+                )),
             ]
             .iter().cloned().collect::<TokenStream>().into(),
         );
@@ -1054,7 +1056,7 @@ impl<'a> Parser<'a> {
     }
 
     fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
-        literal::expect_no_suffix(sp, &self.sess.span_diagnostic, kind, suffix)
+        literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
     }
 
     /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
@@ -2241,10 +2243,10 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_field_name(&mut self) -> PResult<'a, Ident> {
-        if let token::Literal(token::Integer(name), suffix) = self.token {
+        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token {
             self.expect_no_suffix(self.span, "a tuple index", suffix);
             self.bump();
-            Ok(Ident::new(name, self.prev_span))
+            Ok(Ident::new(symbol, self.prev_span))
         } else {
             self.parse_ident_common(false)
         }
@@ -3045,19 +3047,19 @@ impl<'a> Parser<'a> {
                     token::Ident(..) => {
                         e = self.parse_dot_suffix(e, lo)?;
                     }
-                    token::Literal(token::Integer(name), suffix) => {
+                    token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
                         let span = self.span;
                         self.bump();
-                        let field = ExprKind::Field(e, Ident::new(name, span));
+                        let field = ExprKind::Field(e, Ident::new(symbol, span));
                         e = self.mk_expr(lo.to(span), field, ThinVec::new());
 
                         self.expect_no_suffix(span, "a tuple index", suffix);
                     }
-                    token::Literal(token::Float(n), _suf) => {
+                    token::Literal(token::Lit { kind: token::Float, symbol, .. }) => {
                       self.bump();
-                      let fstr = n.as_str();
-                      let mut err = self.diagnostic()
-                          .struct_span_err(self.prev_span, &format!("unexpected token: `{}`", n));
+                      let fstr = symbol.as_str();
+                      let msg = format!("unexpected token: `{}`", symbol);
+                      let mut err = self.diagnostic().struct_span_err(self.prev_span, &msg);
                       err.span_label(self.prev_span, "unexpected token");
                       if fstr.chars().all(|x| "0123456789.".contains(x)) {
                           let float = match fstr.parse::<f64>().ok() {
@@ -7557,11 +7559,12 @@ impl<'a> Parser<'a> {
     /// the `extern` keyword, if one is found.
     fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
         match self.token {
-            token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => {
+            token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
+            token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
                 let sp = self.span;
-                self.expect_no_suffix(sp, "an ABI spec", suf);
+                self.expect_no_suffix(sp, "an ABI spec", suffix);
                 self.bump();
-                match abi::lookup(&s.as_str()) {
+                match abi::lookup(&symbol.as_str()) {
                     Some(abi) => Ok(Some(abi)),
                     None => {
                         let prev_span = self.prev_span;
@@ -7570,7 +7573,7 @@ impl<'a> Parser<'a> {
                             prev_span,
                             E0703,
                             "invalid ABI: found `{}`",
-                            s);
+                            symbol);
                         err.span_label(prev_span, "invalid ABI");
                         err.help(&format!("valid ABIs: {}", abi::all_names().join(", ")));
                         err.emit();
@@ -8370,8 +8373,10 @@ impl<'a> Parser<'a> {
 
     pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
         let ret = match self.token {
-            token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
-            token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),
+            token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
+                (symbol, ast::StrStyle::Cooked, suffix),
+            token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
+                (symbol, ast::StrStyle::Raw(n), suffix),
             _ => return None
         };
         self.bump();
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 663eace6b62..e5361b2db4e 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -1,7 +1,7 @@
 pub use BinOpToken::*;
 pub use Nonterminal::*;
 pub use DelimToken::*;
-pub use Lit::*;
+pub use LitKind::*;
 pub use Token::*;
 
 use crate::ast::{self};
@@ -59,48 +59,61 @@ impl DelimToken {
     }
 }
 
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum Lit {
-    Bool(ast::Name), // AST only, must never appear in a `Token`
-    Byte(ast::Name),
-    Char(ast::Name),
-    Err(ast::Name),
-    Integer(ast::Name),
-    Float(ast::Name),
-    Str_(ast::Name),
-    StrRaw(ast::Name, u16), /* raw str delimited by n hash symbols */
-    ByteStr(ast::Name),
-    ByteStrRaw(ast::Name, u16), /* raw byte str delimited by n hash symbols */
+#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub enum LitKind {
+    Bool, // AST only, must never appear in a `Token`
+    Byte,
+    Char,
+    Integer,
+    Float,
+    Str,
+    StrRaw(u16), // raw string delimited by `n` hash symbols
+    ByteStr,
+    ByteStrRaw(u16), // raw byte string delimited by `n` hash symbols
+    Err,
 }
 
-#[cfg(target_arch = "x86_64")]
-static_assert_size!(Lit, 8);
+/// A literal token.
+#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub struct Lit {
+    pub kind: LitKind,
+    pub symbol: Symbol,
+    pub suffix: Option<Symbol>,
+}
 
-impl Lit {
-    crate fn literal_name(&self) -> &'static str {
-        match *self {
-            Bool(_) => panic!("literal token contains `Lit::Bool`"),
-            Byte(_) => "byte literal",
-            Char(_) => "char literal",
-            Err(_) => "invalid literal",
-            Integer(_) => "integer literal",
-            Float(_) => "float literal",
-            Str_(_) | StrRaw(..) => "string literal",
-            ByteStr(_) | ByteStrRaw(..) => "byte string literal"
+impl LitKind {
+    /// An English article for the literal token kind.
+    crate fn article(self) -> &'static str {
+        match self {
+            Integer | Err => "an",
+            _ => "a",
         }
     }
 
-    crate fn may_have_suffix(&self) -> bool {
-        match *self {
-            Integer(..) | Float(..) => true,
+    crate fn descr(self) -> &'static str {
+        match self {
+            Bool => panic!("literal token contains `Lit::Bool`"),
+            Byte => "byte",
+            Char => "char",
+            Integer => "integer",
+            Float => "float",
+            Str | StrRaw(..) => "string",
+            ByteStr | ByteStrRaw(..) => "byte string",
+            Err => "error",
+        }
+    }
+
+    crate fn may_have_suffix(self) -> bool {
+        match self {
+            Integer | Float | Err => true,
             _ => false,
         }
     }
+}
 
-    // See comments in `Nonterminal::to_tokenstream` for why we care about
-    // *probably* equal here rather than actual equality
-    fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
-        mem::discriminant(self) == mem::discriminant(other)
+impl Lit {
+    pub fn new(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Lit {
+        Lit { kind, symbol, suffix }
     }
 }
 
@@ -193,7 +206,7 @@ pub enum Token {
     CloseDelim(DelimToken),
 
     /* Literals */
-    Literal(Lit, Option<ast::Name>),
+    Literal(Lit),
 
     /* Name components */
     Ident(ast::Ident, /* is_raw */ bool),
@@ -310,6 +323,10 @@ impl Token {
         self == &Question || self == &OpenDelim(Paren)
     }
 
+    pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Token {
+        Literal(Lit::new(kind, symbol, suffix))
+    }
+
     /// Returns `true` if the token is any literal
     crate fn is_lit(&self) -> bool {
         match *self {
@@ -318,6 +335,13 @@ impl Token {
         }
     }
 
+    crate fn expect_lit(&self) -> Lit {
+        match *self {
+            Literal(lit) => lit,
+            _ => panic!("`expect_lit` called on non-literal"),
+        }
+    }
+
     /// Returns `true` if the token is any literal, a minus (which can prefix a literal,
     /// for example a '-42', or one of the boolean idents).
     crate fn can_begin_literal_or_bool(&self) -> bool {
@@ -564,15 +588,13 @@ impl Token {
             (&DocComment(a), &DocComment(b)) |
             (&Shebang(a), &Shebang(b)) => a == b,
 
+            (&Literal(a), &Literal(b)) => a == b,
+
             (&Lifetime(a), &Lifetime(b)) => a.name == b.name,
             (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
                                                        a.name == kw::DollarCrate ||
                                                        c.name == kw::DollarCrate),
 
-            (&Literal(ref a, b), &Literal(ref c, d)) => {
-                b == d && a.probably_equal_for_proc_macro(c)
-            }
-
             (&Interpolated(_), &Interpolated(_)) => false,
 
             _ => panic!("forgot to add a token?"),