author     Alex Crichton <alex@alexcrichton.com>  2019-05-21 15:37:07 -0700
committer  Alex Crichton <alex@alexcrichton.com>  2019-05-21 15:37:07 -0700
commit     fe3dd0b50fef21d14591c960a9610bafb224cdbf (patch)
tree       ee0b62d8e500d4ce4b6f50b4fe5d9056b9826072 /src/libsyntax
parent     e764f475ca7fffd6167ea991afc7d1b2b3f642dc (diff)
parent     50a0defd5a93523067ef239936cc2e0755220904 (diff)
Merge remote-tracking branch 'origin/master' into azure-pipelines
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                     |  32
-rw-r--r--  src/libsyntax/attr/builtin.rs            | 121
-rw-r--r--  src/libsyntax/attr/mod.rs                | 141
-rw-r--r--  src/libsyntax/config.rs                  |  11
-rw-r--r--  src/libsyntax/diagnostics/plugin.rs      |   6
-rw-r--r--  src/libsyntax/entry.rs                   |   7
-rw-r--r--  src/libsyntax/ext/base.rs                |   4
-rw-r--r--  src/libsyntax/ext/build.rs               |  11
-rw-r--r--  src/libsyntax/ext/derive.rs              |   4
-rw-r--r--  src/libsyntax/ext/expand.rs              |  46
-rw-r--r--  src/libsyntax/ext/source_util.rs         |   4
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs      |  37
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs       |  16
-rw-r--r--  src/libsyntax/feature_gate.rs            | 470
-rw-r--r--  src/libsyntax/mut_visit.rs               |   4
-rw-r--r--  src/libsyntax/parse/classify.rs          |  13
-rw-r--r--  src/libsyntax/parse/diagnostics.rs       | 304
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs    |  29
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs         |  45
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs  |  67
-rw-r--r--  src/libsyntax/parse/literal.rs           | 487
-rw-r--r--  src/libsyntax/parse/mod.rs               | 329
-rw-r--r--  src/libsyntax/parse/parser.rs            | 540
-rw-r--r--  src/libsyntax/parse/token.rs             |  30
-rw-r--r--  src/libsyntax/print/pprust.rs            | 170
-rw-r--r--  src/libsyntax/std_inject.rs              |  36
-rw-r--r--  src/libsyntax/test.rs                    |  31
-rw-r--r--  src/libsyntax/tokenstream.rs             |   4
28 files changed, 1477 insertions(+), 1522 deletions(-)
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index af2302d24f5..a6bb47bef87 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -6,6 +6,7 @@ pub use crate::symbol::{Ident, Symbol as Name};
 pub use crate::util::parser::ExprPrecedence;
 
 use crate::ext::hygiene::{Mark, SyntaxContext};
+use crate::parse::token;
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::{dummy_spanned, respan, Spanned};
@@ -15,7 +16,7 @@ use crate::ThinVec;
 
 use rustc_data_structures::indexed_vec::Idx;
 #[cfg(target_arch = "x86_64")]
-use rustc_data_structures::static_assert;
+use rustc_data_structures::static_assert_size;
 use rustc_target::spec::abi::Abi;
 use syntax_pos::{Span, DUMMY_SP};
 
@@ -71,21 +72,11 @@ pub struct Path {
 impl PartialEq<Symbol> for Path {
     fn eq(&self, symbol: &Symbol) -> bool {
         self.segments.len() == 1 && {
-            let name = self.segments[0].ident.name;
-            // Make sure these symbols are pure strings
-            debug_assert!(!symbol.is_gensymed());
-            debug_assert!(!name.is_gensymed());
-            name == *symbol
+            self.segments[0].ident.name == *symbol
         }
     }
 }
 
-impl<'a> PartialEq<&'a str> for Path {
-    fn eq(&self, string: &&'a str) -> bool {
-        self.segments.len() == 1 && self.segments[0].ident.name == *string
-    }
-}
-
 impl fmt::Debug for Path {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "path({})", pprust::path_to_string(self))
@@ -969,7 +960,7 @@ pub struct Expr {
 
 // `Expr` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
-static_assert!(MEM_SIZE_OF_EXPR: std::mem::size_of::<Expr>() == 96);
+static_assert_size!(Expr, 96);
 
 impl Expr {
     /// Whether this expression would be valid somewhere that expects a value; for example, an `if`
@@ -1350,8 +1341,19 @@ pub enum StrStyle {
     Raw(u16),
 }
 
-/// A literal.
-pub type Lit = Spanned<LitKind>;
+/// An AST literal.
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
+pub struct Lit {
+    /// The original literal token as written in source code.
+    pub token: token::Lit,
+    /// The original literal suffix as written in source code.
+    pub suffix: Option<Symbol>,
+    /// The "semantic" representation of the literal lowered from the original tokens.
+    /// Strings are unescaped, hexadecimal forms are eliminated, etc.
+    /// FIXME: Remove this and only create the semantic representation during lowering to HIR.
+    pub node: LitKind,
+    pub span: Span,
+}
 
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq)]
 pub enum LitIntType {
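
This hunk turns `ast::Lit` from the `Spanned<LitKind>` alias into a struct that keeps the original token alongside the lowered `LitKind`. A minimal sketch of building one via the `from_lit_kind` constructor used elsewhere in this commit (assuming libsyntax crate context; the `str_lit` helper is hypothetical):

```rust
use crate::ast::{self, LitKind, StrStyle};
use crate::symbol::Symbol;
use syntax_pos::DUMMY_SP;

// Build an AST string literal; the token-level form is derived from the
// semantic LitKind by the constructor introduced in this commit.
fn str_lit(text: &str) -> ast::Lit {
    let kind = LitKind::Str(Symbol::intern(text), StrStyle::Cooked);
    ast::Lit::from_lit_kind(kind, DUMMY_SP)
}
```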
diff --git a/src/libsyntax/attr/builtin.rs b/src/libsyntax/attr/builtin.rs
index db821f4e536..65ca96afab1 100644
--- a/src/libsyntax/attr/builtin.rs
+++ b/src/libsyntax/attr/builtin.rs
@@ -5,7 +5,7 @@ use crate::feature_gate::{Features, GatedCfg};
 use crate::parse::ParseSess;
 
 use errors::{Applicability, Handler};
-use syntax_pos::{symbol::Symbol, Span};
+use syntax_pos::{symbol::Symbol, symbol::sym, Span};
 
 use super::{mark_used, MetaItemKind};
 
@@ -80,13 +80,13 @@ pub enum UnwindAttr {
 /// Determine what `#[unwind]` attribute is present in `attrs`, if any.
 pub fn find_unwind_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> Option<UnwindAttr> {
     attrs.iter().fold(None, |ia, attr| {
-        if attr.check_name("unwind") {
+        if attr.check_name(sym::unwind) {
             if let Some(meta) = attr.meta() {
                 if let MetaItemKind::List(items) = meta.node {
                     if items.len() == 1 {
-                        if items[0].check_name("allowed") {
+                        if items[0].check_name(sym::allowed) {
                             return Some(UnwindAttr::Allowed);
-                        } else if items[0].check_name("aborts") {
+                        } else if items[0].check_name(sym::aborts) {
                             return Some(UnwindAttr::Aborts);
                         }
                     }
@@ -153,9 +153,9 @@ pub struct RustcDeprecation {
 
 /// Checks if `attrs` contains an attribute like `#![feature(feature_name)]`.
 /// This will not perform any "sanity checks" on the form of the attributes.
-pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool {
+pub fn contains_feature_attr(attrs: &[Attribute], feature_name: Symbol) -> bool {
     attrs.iter().any(|item| {
-        item.check_name("feature") &&
+        item.check_name(sym::feature) &&
         item.meta_item_list().map(|list| {
             list.iter().any(|mi| mi.is_word() && mi.check_name(feature_name))
         }).unwrap_or(false)
@@ -185,12 +185,12 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
 
     'outer: for attr in attrs_iter {
         if ![
-            "rustc_deprecated",
-            "rustc_const_unstable",
-            "unstable",
-            "stable",
-            "rustc_promotable",
-            "rustc_allow_const_fn_ptr",
+            sym::rustc_deprecated,
+            sym::rustc_const_unstable,
+            sym::unstable,
+            sym::stable,
+            sym::rustc_promotable,
+            sym::rustc_allow_const_fn_ptr,
         ].iter().any(|&s| attr.path == s) {
             continue // not a stability level
         }
@@ -199,10 +199,10 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
 
         let meta = attr.meta();
 
-        if attr.path == "rustc_promotable" {
+        if attr.path == sym::rustc_promotable {
             promotable = true;
         }
-        if attr.path == "rustc_allow_const_fn_ptr" {
+        if attr.path == sym::rustc_allow_const_fn_ptr {
             allow_const_fn_ptr = true;
         }
         // attributes with data
@@ -229,10 +229,9 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
                     )+
                     for meta in metas {
                         if let Some(mi) = meta.meta_item() {
-                            match mi.name_or_empty().get() {
+                            match mi.name_or_empty() {
                                 $(
-                                    stringify!($name)
-                                        => if !get(mi, &mut $name) { continue 'outer },
+                                    sym::$name => if !get(mi, &mut $name) { continue 'outer },
                                 )+
                                 _ => {
                                     let expected = &[ $( stringify!($name) ),+ ];
@@ -259,8 +258,8 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
                 }
             }
 
-            match meta.name_or_empty().get() {
-                "rustc_deprecated" => {
+            match meta.name_or_empty() {
+                sym::rustc_deprecated => {
                     if rustc_depr.is_some() {
                         span_err!(diagnostic, item_sp, E0540,
                                   "multiple rustc_deprecated attributes");
@@ -287,7 +286,7 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
                         }
                     }
                 }
-                "rustc_const_unstable" => {
+                sym::rustc_const_unstable => {
                     if rustc_const_unstable.is_some() {
                         span_err!(diagnostic, item_sp, E0553,
                                   "multiple rustc_const_unstable attributes");
@@ -302,7 +301,7 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
                         continue
                     }
                 }
-                "unstable" => {
+                sym::unstable => {
                     if stab.is_some() {
                         handle_errors(sess, attr.span, AttrError::MultipleStabilityLevels);
                         break
@@ -313,10 +312,10 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
                     let mut issue = None;
                     for meta in metas {
                         if let Some(mi) = meta.meta_item() {
-                            match mi.name_or_empty().get() {
-                                "feature" => if !get(mi, &mut feature) { continue 'outer },
-                                "reason" => if !get(mi, &mut reason) { continue 'outer },
-                                "issue" => if !get(mi, &mut issue) { continue 'outer },
+                            match mi.name_or_empty() {
+                                sym::feature => if !get(mi, &mut feature) { continue 'outer },
+                                sym::reason => if !get(mi, &mut reason) { continue 'outer },
+                                sym::issue => if !get(mi, &mut issue) { continue 'outer },
                                 _ => {
                                     handle_errors(
                                         sess,
@@ -374,7 +373,7 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
                         }
                     }
                 }
-                "stable" => {
+                sym::stable => {
                     if stab.is_some() {
                         handle_errors(sess, attr.span, AttrError::MultipleStabilityLevels);
                         break
@@ -385,11 +384,9 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
                     for meta in metas {
                         match meta {
                             NestedMetaItem::MetaItem(mi) => {
-                                match mi.name_or_empty().get() {
-                                    "feature" =>
-                                        if !get(mi, &mut feature) { continue 'outer },
-                                    "since" =>
-                                        if !get(mi, &mut since) { continue 'outer },
+                                match mi.name_or_empty() {
+                                    sym::feature => if !get(mi, &mut feature) { continue 'outer },
+                                    sym::since => if !get(mi, &mut since) { continue 'outer },
                                     _ => {
                                         handle_errors(
                                             sess,
@@ -482,7 +479,7 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
 }
 
 pub fn find_crate_name(attrs: &[Attribute]) -> Option<Symbol> {
-    super::first_attr_value_str_by_name(attrs, "crate_name")
+    super::first_attr_value_str_by_name(attrs, sym::crate_name)
 }
 
 /// Tests if a cfg-pattern matches the cfg set
@@ -542,14 +539,14 @@ pub fn eval_condition<F>(cfg: &ast::MetaItem, sess: &ParseSess, eval: &mut F)
 
             // The unwraps below may look dangerous, but we've already asserted
             // that they won't fail with the loop above.
-            match cfg.name_or_empty().get() {
-                "any" => mis.iter().any(|mi| {
+            match cfg.name_or_empty() {
+                sym::any => mis.iter().any(|mi| {
                     eval_condition(mi.meta_item().unwrap(), sess, eval)
                 }),
-                "all" => mis.iter().all(|mi| {
+                sym::all => mis.iter().all(|mi| {
                     eval_condition(mi.meta_item().unwrap(), sess, eval)
                 }),
-                "not" => {
+                sym::not => {
                     if mis.len() != 1 {
                         span_err!(sess.span_diagnostic, cfg.span, E0536, "expected 1 cfg-pattern");
                         return false;
@@ -593,7 +590,7 @@ fn find_deprecation_generic<'a, I>(sess: &ParseSess,
     let diagnostic = &sess.span_diagnostic;
 
     'outer: for attr in attrs_iter {
-        if !attr.check_name("deprecated") {
+        if !attr.check_name(sym::deprecated) {
             continue;
         }
 
@@ -645,9 +642,9 @@ fn find_deprecation_generic<'a, I>(sess: &ParseSess,
                 for meta in list {
                     match meta {
                         NestedMetaItem::MetaItem(mi) => {
-                            match mi.name_or_empty().get() {
-                                "since" => if !get(mi, &mut since) { continue 'outer },
-                                "note" => if !get(mi, &mut note) { continue 'outer },
+                            match mi.name_or_empty() {
+                                sym::since => if !get(mi, &mut since) { continue 'outer },
+                                sym::note => if !get(mi, &mut note) { continue 'outer },
                                 _ => {
                                     handle_errors(
                                         sess,
@@ -721,7 +718,7 @@ pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
 
     let mut acc = Vec::new();
     let diagnostic = &sess.span_diagnostic;
-    if attr.path == "repr" {
+    if attr.path == sym::repr {
         if let Some(items) = attr.meta_item_list() {
             mark_used(attr);
             for item in items {
@@ -739,11 +736,11 @@ pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
 
                 let mut recognised = false;
                 if item.is_word() {
-                    let hint = match item.name_or_empty().get() {
-                        "C" => Some(ReprC),
-                        "packed" => Some(ReprPacked(1)),
-                        "simd" => Some(ReprSimd),
-                        "transparent" => Some(ReprTransparent),
+                    let hint = match item.name_or_empty() {
+                        sym::C => Some(ReprC),
+                        sym::packed => Some(ReprPacked(1)),
+                        sym::simd => Some(ReprSimd),
+                        sym::transparent => Some(ReprTransparent),
                         name => int_type_of_word(name).map(ReprInt),
                     };
 
@@ -770,14 +767,14 @@ pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
                     };
 
                     let mut literal_error = None;
-                    if name == "align" {
+                    if name == sym::align {
                         recognised = true;
                         match parse_alignment(&value.node) {
                             Ok(literal) => acc.push(ReprAlign(literal)),
                             Err(message) => literal_error = Some(message)
                         };
                     }
-                    else if name == "packed" {
+                    else if name == sym::packed {
                         recognised = true;
                         match parse_alignment(&value.node) {
                             Ok(literal) => acc.push(ReprPacked(literal)),
@@ -790,7 +787,7 @@ pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
                     }
                 } else {
                     if let Some(meta_item) = item.meta_item() {
-                        if meta_item.check_name("align") {
+                        if meta_item.check_name(sym::align) {
                             if let MetaItemKind::NameValue(ref value) = meta_item.node {
                                 recognised = true;
                                 let mut err = struct_span_err!(diagnostic, item.span(), E0693,
@@ -830,22 +827,22 @@ pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
     acc
 }
 
-fn int_type_of_word(s: &str) -> Option<IntType> {
+fn int_type_of_word(s: Symbol) -> Option<IntType> {
     use IntType::*;
 
     match s {
-        "i8" => Some(SignedInt(ast::IntTy::I8)),
-        "u8" => Some(UnsignedInt(ast::UintTy::U8)),
-        "i16" => Some(SignedInt(ast::IntTy::I16)),
-        "u16" => Some(UnsignedInt(ast::UintTy::U16)),
-        "i32" => Some(SignedInt(ast::IntTy::I32)),
-        "u32" => Some(UnsignedInt(ast::UintTy::U32)),
-        "i64" => Some(SignedInt(ast::IntTy::I64)),
-        "u64" => Some(UnsignedInt(ast::UintTy::U64)),
-        "i128" => Some(SignedInt(ast::IntTy::I128)),
-        "u128" => Some(UnsignedInt(ast::UintTy::U128)),
-        "isize" => Some(SignedInt(ast::IntTy::Isize)),
-        "usize" => Some(UnsignedInt(ast::UintTy::Usize)),
+        sym::i8 => Some(SignedInt(ast::IntTy::I8)),
+        sym::u8 => Some(UnsignedInt(ast::UintTy::U8)),
+        sym::i16 => Some(SignedInt(ast::IntTy::I16)),
+        sym::u16 => Some(UnsignedInt(ast::UintTy::U16)),
+        sym::i32 => Some(SignedInt(ast::IntTy::I32)),
+        sym::u32 => Some(UnsignedInt(ast::UintTy::U32)),
+        sym::i64 => Some(SignedInt(ast::IntTy::I64)),
+        sym::u64 => Some(UnsignedInt(ast::UintTy::U64)),
+        sym::i128 => Some(SignedInt(ast::IntTy::I128)),
+        sym::u128 => Some(UnsignedInt(ast::UintTy::U128)),
+        sym::isize => Some(SignedInt(ast::IntTy::Isize)),
+        sym::usize => Some(UnsignedInt(ast::UintTy::Usize)),
         _ => None
     }
 }
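
This file switches every attribute-name comparison from `&str` to interned `Symbol`s, and `name_or_empty()` now returns a `Symbol`, so matches use `sym::…` constants directly. A hedged sketch of the new pattern (the `is_simple_repr_word` helper is hypothetical):

```rust
use crate::ast::MetaItem;
use crate::symbol::sym;

// Recognise the word-only `#[repr(..)]` hints the way find_repr_attrs now
// does: by matching interned symbols rather than string slices.
fn is_simple_repr_word(item: &MetaItem) -> bool {
    match item.name_or_empty() {
        sym::C | sym::packed | sym::simd | sym::transparent => true,
        _ => false,
    }
}
```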
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index e00f91e3952..592b40df176 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -14,15 +14,15 @@ pub use StabilityLevel::*;
 use crate::ast;
 use crate::ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment};
 use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem};
-use crate::ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam};
+use crate::ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind, GenericParam};
 use crate::mut_visit::visit_clobber;
-use crate::source_map::{BytePos, Spanned, respan, dummy_spanned};
+use crate::source_map::{BytePos, Spanned, dummy_spanned};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::parser::Parser;
 use crate::parse::{self, ParseSess, PResult};
 use crate::parse::token::{self, Token};
 use crate::ptr::P;
-use crate::symbol::{keywords, LocalInternedString, Symbol};
+use crate::symbol::{keywords, Symbol, sym};
 use crate::ThinVec;
 use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};
 use crate::GLOBALS;
@@ -81,10 +81,7 @@ impl NestedMetaItem {
     }
 
     /// Returns `true` if this list item is a MetaItem with a name of `name`.
-    pub fn check_name<T>(&self, name: T) -> bool
-    where
-        Path: PartialEq<T>,
-    {
+    pub fn check_name(&self, name: Symbol) -> bool {
         self.meta_item().map_or(false, |meta_item| meta_item.check_name(name))
     }
 
@@ -92,8 +89,8 @@ impl NestedMetaItem {
     pub fn ident(&self) -> Option<Ident> {
         self.meta_item().and_then(|meta_item| meta_item.ident())
     }
-    pub fn name_or_empty(&self) -> LocalInternedString {
-        self.ident().unwrap_or(keywords::Invalid.ident()).name.as_str()
+    pub fn name_or_empty(&self) -> Symbol {
+        self.ident().unwrap_or(keywords::Invalid.ident()).name
     }
 
     /// Gets the string value if self is a MetaItem and the MetaItem is a
@@ -154,10 +151,7 @@ impl Attribute {
     /// attribute is marked as used.
     ///
     /// To check the attribute name without marking it used, use the `path` field directly.
-    pub fn check_name<T>(&self, name: T) -> bool
-    where
-        Path: PartialEq<T>,
-    {
+    pub fn check_name(&self, name: Symbol) -> bool {
         let matches = self.path == name;
         if matches {
             mark_used(self);
@@ -173,8 +167,8 @@ impl Attribute {
             None
         }
     }
-    pub fn name_or_empty(&self) -> LocalInternedString {
-        self.ident().unwrap_or(keywords::Invalid.ident()).name.as_str()
+    pub fn name_or_empty(&self) -> Symbol {
+        self.ident().unwrap_or(keywords::Invalid.ident()).name
     }
 
     pub fn value_str(&self) -> Option<Symbol> {
@@ -211,8 +205,8 @@ impl MetaItem {
             None
         }
     }
-    pub fn name_or_empty(&self) -> LocalInternedString {
-        self.ident().unwrap_or(keywords::Invalid.ident()).name.as_str()
+    pub fn name_or_empty(&self) -> Symbol {
+        self.ident().unwrap_or(keywords::Invalid.ident()).name
     }
 
     // #[attribute(name = "value")]
@@ -250,10 +244,7 @@ impl MetaItem {
         }
     }
 
-    pub fn check_name<T>(&self, name: T) -> bool
-    where
-        Path: PartialEq<T>,
-    {
+    pub fn check_name(&self, name: Symbol) -> bool {
         self.path == name
     }
 
@@ -332,7 +323,7 @@ impl Attribute {
         if self.is_sugared_doc {
             let comment = self.value_str().unwrap();
             let meta = mk_name_value_item_str(
-                Ident::from_str("doc"),
+                Ident::with_empty_ctxt(sym::doc),
                 dummy_spanned(Symbol::intern(&strip_doc_comment_decoration(&comment.as_str()))));
             let mut attr = if self.style == ast::AttrStyle::Outer {
                 mk_attr_outer(self.span, self.id, meta)
@@ -350,12 +341,13 @@ impl Attribute {
 /* Constructors */
 
 pub fn mk_name_value_item_str(ident: Ident, value: Spanned<Symbol>) -> MetaItem {
-    let value = respan(value.span, LitKind::Str(value.node, ast::StrStyle::Cooked));
-    mk_name_value_item(ident.span.to(value.span), ident, value)
+    let lit_kind = LitKind::Str(value.node, ast::StrStyle::Cooked);
+    mk_name_value_item(ident.span.to(value.span), ident, lit_kind, value.span)
 }
 
-pub fn mk_name_value_item(span: Span, ident: Ident, value: ast::Lit) -> MetaItem {
-    MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(value) }
+pub fn mk_name_value_item(span: Span, ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem {
+    let lit = Lit::from_lit_kind(lit_kind, lit_span);
+    MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(lit) }
 }
 
 pub fn mk_list_item(span: Span, ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem {
@@ -417,39 +409,40 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute
 
 pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute {
     let style = doc_comment_style(&text.as_str());
-    let lit = respan(span, LitKind::Str(text, ast::StrStyle::Cooked));
+    let lit_kind = LitKind::Str(text, ast::StrStyle::Cooked);
+    let lit = Lit::from_lit_kind(lit_kind, span);
     Attribute {
         id,
         style,
-        path: Path::from_ident(Ident::from_str("doc").with_span_pos(span)),
+        path: Path::from_ident(Ident::with_empty_ctxt(sym::doc).with_span_pos(span)),
         tokens: MetaItemKind::NameValue(lit).tokens(span),
         is_sugared_doc: true,
         span,
     }
 }
 
-pub fn list_contains_name(items: &[NestedMetaItem], name: &str) -> bool {
+pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
     items.iter().any(|item| {
         item.check_name(name)
     })
 }
 
-pub fn contains_name(attrs: &[Attribute], name: &str) -> bool {
+pub fn contains_name(attrs: &[Attribute], name: Symbol) -> bool {
     attrs.iter().any(|item| {
         item.check_name(name)
     })
 }
 
-pub fn find_by_name<'a>(attrs: &'a [Attribute], name: &str) -> Option<&'a Attribute> {
+pub fn find_by_name<'a>(attrs: &'a [Attribute], name: Symbol) -> Option<&'a Attribute> {
     attrs.iter().find(|attr| attr.check_name(name))
 }
 
-pub fn filter_by_name<'a>(attrs: &'a [Attribute], name: &'a str)
+pub fn filter_by_name<'a>(attrs: &'a [Attribute], name: Symbol)
     -> impl Iterator<Item = &'a Attribute> {
     attrs.iter().filter(move |attr| attr.check_name(name))
 }
 
-pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) -> Option<Symbol> {
+pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: Symbol) -> Option<Symbol> {
     attrs.iter()
         .find(|at| at.check_name(name))
         .and_then(|at| at.value_str())
@@ -561,8 +554,7 @@ impl MetaItemKind {
             Some(TokenTree::Token(_, token::Eq)) => {
                 tokens.next();
                 return if let Some(TokenTree::Token(span, token)) = tokens.next() {
-                    LitKind::from_token(token)
-                        .map(|lit| MetaItemKind::NameValue(Spanned { node: lit, span: span }))
+                    Lit::from_token(&token, span, None).map(MetaItemKind::NameValue)
                 } else {
                     None
                 };
@@ -607,9 +599,9 @@ impl NestedMetaItem {
         where I: Iterator<Item = TokenTree>,
     {
         if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
-            if let Some(node) = LitKind::from_token(token) {
+            if let Some(lit) = Lit::from_token(&token, span, None) {
                 tokens.next();
-                return Some(NestedMetaItem::Literal(respan(span, node)));
+                return Some(NestedMetaItem::Literal(lit));
             }
         }
 
@@ -617,81 +609,6 @@ impl NestedMetaItem {
     }
 }
 
-impl Lit {
-    crate fn tokens(&self) -> TokenStream {
-        TokenTree::Token(self.span, self.node.token()).into()
-    }
-}
-
-impl LitKind {
-    fn token(&self) -> Token {
-        use std::ascii;
-
-        match *self {
-            LitKind::Str(string, ast::StrStyle::Cooked) => {
-                let escaped = string.as_str().escape_default().to_string();
-                Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None)
-            }
-            LitKind::Str(string, ast::StrStyle::Raw(n)) => {
-                Token::Literal(token::Lit::StrRaw(string, n), None)
-            }
-            LitKind::ByteStr(ref bytes) => {
-                let string = bytes.iter().cloned().flat_map(ascii::escape_default)
-                    .map(Into::<char>::into).collect::<String>();
-                Token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None)
-            }
-            LitKind::Byte(byte) => {
-                let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
-                Token::Literal(token::Lit::Byte(Symbol::intern(&string)), None)
-            }
-            LitKind::Char(ch) => {
-                let string: String = ch.escape_default().map(Into::<char>::into).collect();
-                Token::Literal(token::Lit::Char(Symbol::intern(&string)), None)
-            }
-            LitKind::Int(n, ty) => {
-                let suffix = match ty {
-                    ast::LitIntType::Unsigned(ty) => Some(Symbol::intern(ty.ty_to_string())),
-                    ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
-                    ast::LitIntType::Unsuffixed => None,
-                };
-                Token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
-            }
-            LitKind::Float(symbol, ty) => {
-                Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
-            }
-            LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
-            LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value {
-                "true"
-            } else {
-                "false"
-            })), false),
-            LitKind::Err(val) => Token::Literal(token::Lit::Err(val), None),
-        }
-    }
-
-    fn from_token(token: Token) -> Option<LitKind> {
-        match token {
-            Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)),
-            Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)),
-            Token::Interpolated(nt) => match *nt {
-                token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
-                    ExprKind::Lit(ref lit) => Some(lit.node.clone()),
-                    _ => None,
-                },
-                _ => None,
-            },
-            Token::Literal(lit, suf) => {
-                let (suffix_illegal, result) = parse::lit_token(lit, suf, None);
-                if suffix_illegal && suf.is_some() {
-                    return None;
-                }
-                result
-            }
-            _ => None,
-        }
-    }
-}
-
 pub trait HasAttrs: Sized {
     fn attrs(&self) -> &[ast::Attribute];
     fn visit_attrs<F: FnOnce(&mut Vec<ast::Attribute>)>(&mut self, f: F);
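
`check_name` and the lookup helpers (`contains_name`, `find_by_name`, `first_attr_value_str_by_name`, …) now take a `Symbol` instead of being generic over whatever `Path` compares to. A call-site sketch under the new signatures (the `crate_name_of` wrapper is illustrative, not part of the commit):

```rust
use crate::ast::Attribute;
use crate::attr;
use crate::symbol::{sym, Symbol};

// Look up `#![crate_name = "..."]` by its interned name.
fn crate_name_of(attrs: &[Attribute]) -> Option<Symbol> {
    attr::first_attr_value_str_by_name(attrs, sym::crate_name)
}
```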
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs
index 18173628a26..c82936afa3d 100644
--- a/src/libsyntax/config.rs
+++ b/src/libsyntax/config.rs
@@ -12,6 +12,7 @@ use crate::edition::Edition;
 use crate::mut_visit::*;
 use crate::parse::{token, ParseSess};
 use crate::ptr::P;
+use crate::symbol::sym;
 use crate::util::map_in_place::MapInPlace;
 
 use errors::Applicability;
@@ -90,7 +91,7 @@ impl<'a> StripUnconfigured<'a> {
     /// is in the original source file. Gives a compiler error if the syntax of
     /// the attribute is incorrect.
     fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Vec<ast::Attribute> {
-        if !attr.check_name("cfg_attr") {
+        if !attr.check_name(sym::cfg_attr) {
             return vec![attr];
         }
 
@@ -205,7 +206,7 @@ impl<'a> StripUnconfigured<'a> {
     pub fn maybe_emit_expr_attr_err(&self, attr: &ast::Attribute) {
         if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
             let mut err = feature_err(self.sess,
-                                      "stmt_expr_attributes",
+                                      sym::stmt_expr_attributes,
                                       attr.span,
                                       GateIssue::Language,
                                       EXPLAIN_STMT_ATTR_SYNTAX);
@@ -285,9 +286,9 @@ impl<'a> StripUnconfigured<'a> {
     /// See issue #51279.
     pub fn disallow_cfg_on_generic_param(&mut self, param: &ast::GenericParam) {
         for attr in param.attrs() {
-            let offending_attr = if attr.check_name("cfg") {
+            let offending_attr = if attr.check_name(sym::cfg) {
                 "cfg"
-            } else if attr.check_name("cfg_attr") {
+            } else if attr.check_name(sym::cfg_attr) {
                 "cfg_attr"
             } else {
                 continue;
@@ -350,5 +351,5 @@ impl<'a> MutVisitor for StripUnconfigured<'a> {
 }
 
 fn is_cfg(attr: &ast::Attribute) -> bool {
-    attr.check_name("cfg")
+    attr.check_name(sym::cfg)
 }
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index 21024eb41ef..c988dc61bec 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -7,7 +7,7 @@ use crate::ext::base::{ExtCtxt, MacEager, MacResult};
 use crate::ext::build::AstBuilder;
 use crate::parse::token;
 use crate::ptr::P;
-use crate::symbol::{keywords, Symbol};
+use crate::symbol::keywords;
 use crate::tokenstream::{TokenTree};
 
 use smallvec::smallvec;
@@ -121,13 +121,13 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
 
     let span = span.apply_mark(ecx.current_expansion.mark);
 
-    let sym = Ident::new(Symbol::gensym(&format!("__register_diagnostic_{}", code)), span);
+    let name = Ident::from_str_and_span(&format!("__register_diagnostic_{}", code), span).gensym();
 
     MacEager::items(smallvec![
         ecx.item_mod(
             span,
             span,
-            sym,
+            name,
             vec![],
             vec![],
         )
diff --git a/src/libsyntax/entry.rs b/src/libsyntax/entry.rs
index 09e26e29d86..0b6cf30bd27 100644
--- a/src/libsyntax/entry.rs
+++ b/src/libsyntax/entry.rs
@@ -1,5 +1,6 @@
 use crate::attr;
 use crate::ast::{Item, ItemKind};
+use crate::symbol::sym;
 
 pub enum EntryPointType {
     None,
@@ -14,11 +15,11 @@ pub enum EntryPointType {
 pub fn entry_point_type(item: &Item, depth: usize) -> EntryPointType {
     match item.node {
         ItemKind::Fn(..) => {
-            if attr::contains_name(&item.attrs, "start") {
+            if attr::contains_name(&item.attrs, sym::start) {
                 EntryPointType::Start
-            } else if attr::contains_name(&item.attrs, "main") {
+            } else if attr::contains_name(&item.attrs, sym::main) {
                 EntryPointType::MainAttr
-            } else if item.ident.name == "main" {
+            } else if item.ident.name == sym::main {
                 if depth == 1 {
                     // This is a top-level function so can be 'main'
                     EntryPointType::MainNamed
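
Entry-point detection now compares attribute and identifier names against `sym::start` and `sym::main`; since `Ident::name` is a `Symbol`, the comparison is direct. A small sketch (the `is_named_main` helper is hypothetical):

```rust
use crate::ast::Item;
use crate::symbol::sym;

// Mirror of the check in entry_point_type: an item literally named `main`.
fn is_named_main(item: &Item) -> bool {
    item.ident.name == sym::main
}
```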
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index cc19acb61ad..0a88d2f8824 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -10,7 +10,7 @@ use crate::mut_visit::{self, MutVisitor};
 use crate::parse::{self, parser, DirectoryOwnership};
 use crate::parse::token;
 use crate::ptr::P;
-use crate::symbol::{keywords, Ident, Symbol};
+use crate::symbol::{keywords, Ident, Symbol, sym};
 use crate::ThinVec;
 use crate::tokenstream::{self, TokenStream};
 
@@ -871,7 +871,7 @@ impl<'a> ExtCtxt<'a> {
         let mut last_macro = None;
         loop {
             if ctxt.outer().expn_info().map_or(None, |info| {
-                if info.format.name() == "include" {
+                if info.format.name() == sym::include {
                     // Stop going up the backtrace once include! is encountered
                     return None;
                 }
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 40dd187ed28..d24106f697e 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -697,8 +697,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         self.expr_struct(span, self.path_ident(span, id), fields)
     }
 
-    fn expr_lit(&self, sp: Span, lit: ast::LitKind) -> P<ast::Expr> {
-        self.expr(sp, ast::ExprKind::Lit(respan(sp, lit)))
+    fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
+        let lit = ast::Lit::from_lit_kind(lit_kind, span);
+        self.expr(span, ast::ExprKind::Lit(lit))
     }
     fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
         self.expr_lit(span, ast::LitKind::Int(i as u128,
@@ -1164,10 +1165,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         attr::mk_list_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp), mis)
     }
 
-    fn meta_name_value(&self, sp: Span, name: ast::Name, value: ast::LitKind)
+    fn meta_name_value(&self, span: Span, name: ast::Name, lit_kind: ast::LitKind)
                        -> ast::MetaItem {
-        attr::mk_name_value_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp),
-                                 respan(sp, value))
+        attr::mk_name_value_item(span, Ident::with_empty_ctxt(name).with_span_pos(span),
+                                 lit_kind, span)
     }
 
     fn item_use(&self, sp: Span,
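
`mk_name_value_item` (and `expr_lit`) now take the `LitKind` plus its span and build the `ast::Lit` internally. A hedged sketch of constructing `example = "value"` metadata under the new signature (the helper name and the use of `DUMMY_SP` are illustrative):

```rust
use crate::ast::{self, LitKind, StrStyle};
use crate::attr;
use crate::symbol::{Ident, Symbol};
use syntax_pos::DUMMY_SP;

// Build the meta item `example = "value"`; the literal node is created from
// its LitKind by mk_name_value_item itself after this change.
fn example_meta() -> ast::MetaItem {
    let kind = LitKind::Str(Symbol::intern("value"), StrStyle::Cooked);
    attr::mk_name_value_item(DUMMY_SP, Ident::from_str("example"), kind, DUMMY_SP)
}
```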
diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs
index 6df369133d0..a24e09f127e 100644
--- a/src/libsyntax/ext/derive.rs
+++ b/src/libsyntax/ext/derive.rs
@@ -4,7 +4,7 @@ use crate::source_map::{hygiene, ExpnInfo, ExpnFormat};
 use crate::ext::base::ExtCtxt;
 use crate::ext::build::AstBuilder;
 use crate::parse::parser::PathStyle;
-use crate::symbol::Symbol;
+use crate::symbol::{Symbol, sym};
 
 use syntax_pos::Span;
 
@@ -13,7 +13,7 @@ use rustc_data_structures::fx::FxHashSet;
 pub fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
     let mut result = Vec::new();
     attrs.retain(|attr| {
-        if attr.path != "derive" {
+        if attr.path != sym::derive {
             return true;
         }
         if !attr.is_meta_item_list() {
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 82358679c0e..019ebc8566f 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -14,7 +14,7 @@ use crate::parse::token::{self, Token};
 use crate::parse::parser::Parser;
 use crate::ptr::P;
 use crate::symbol::Symbol;
-use crate::symbol::keywords;
+use crate::symbol::{keywords, sym};
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::visit::{self, Visitor};
 use crate::util::map_in_place::MapInPlace;
@@ -356,7 +356,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     self.collect_invocations(fragment, &[])
                 } else if let InvocationKind::Attr { attr: None, traits, item, .. } = invoc.kind {
                     if !item.derive_allowed() {
-                        let attr = attr::find_by_name(item.attrs(), "derive")
+                        let attr = attr::find_by_name(item.attrs(), sym::derive)
                             .expect("`derive` attribute should exist");
                         let span = attr.span;
                         let mut err = self.cx.mut_span_err(span,
@@ -376,7 +376,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     }
 
                     let mut item = self.fully_configure(item);
-                    item.visit_attrs(|attrs| attrs.retain(|a| a.path != "derive"));
+                    item.visit_attrs(|attrs| attrs.retain(|a| a.path != sym::derive));
                     let mut item_with_markers = item.clone();
                     add_derived_markers(&mut self.cx, item.span(), &traits, &mut item_with_markers);
                     let derives = derives.entry(invoc.expansion_data.mark).or_default();
@@ -510,7 +510,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         if invoc.fragment_kind == AstFragmentKind::ForeignItems &&
            !self.cx.ecfg.macros_in_extern_enabled() {
             if let SyntaxExtension::NonMacroAttr { .. } = *ext {} else {
-                emit_feature_err(&self.cx.parse_sess, "macros_in_extern",
+                emit_feature_err(&self.cx.parse_sess, sym::macros_in_extern,
                                  invoc.span(), GateIssue::Language,
                                  "macro invocations in `extern {}` blocks are experimental");
             }
@@ -636,7 +636,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             Annotatable::Item(ref item) => {
                 match item.node {
                     ItemKind::Mod(_) if self.cx.ecfg.proc_macro_hygiene() => return,
-                    ItemKind::Mod(_) => ("modules", "proc_macro_hygiene"),
+                    ItemKind::Mod(_) => ("modules", sym::proc_macro_hygiene),
                     _ => return,
                 }
             }
@@ -645,8 +645,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             Annotatable::ForeignItem(_) => return,
             Annotatable::Stmt(_) |
             Annotatable::Expr(_) if self.cx.ecfg.proc_macro_hygiene() => return,
-            Annotatable::Stmt(_) => ("statements", "proc_macro_hygiene"),
-            Annotatable::Expr(_) => ("expressions", "proc_macro_hygiene"),
+            Annotatable::Stmt(_) => ("statements", sym::proc_macro_hygiene),
+            Annotatable::Expr(_) => ("expressions", sym::proc_macro_hygiene),
         };
         emit_feature_err(
             self.cx.parse_sess,
@@ -681,7 +681,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 if let ast::ItemKind::MacroDef(_) = i.node {
                     emit_feature_err(
                         self.parse_sess,
-                        "proc_macro_hygiene",
+                        sym::proc_macro_hygiene,
                         self.span,
                         GateIssue::Language,
                         "procedural macros cannot expand to macro definitions",
@@ -724,13 +724,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 // don't stability-check macros in the same crate
                 // (the only time this is null is for syntax extensions registered as macros)
                 if def_site_span.map_or(false, |def_span| !crate_span.contains(def_span))
-                    && !span.allows_unstable(&feature.as_str())
+                    && !span.allows_unstable(feature)
                     && this.cx.ecfg.features.map_or(true, |feats| {
                     // macro features will count as lib features
                     !feats.declared_lib_features.iter().any(|&(feat, _)| feat == feature)
                 }) {
                     let explain = format!("macro {}! is unstable", path);
-                    emit_feature_err(this.cx.parse_sess, &*feature.as_str(), span,
+                    emit_feature_err(this.cx.parse_sess, feature, span,
                                      GateIssue::Library(Some(issue)), &explain);
                     this.cx.trace_macros_diag();
                 }
@@ -885,7 +885,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         }
         emit_feature_err(
             self.cx.parse_sess,
-            "proc_macro_hygiene",
+            sym::proc_macro_hygiene,
             span,
             GateIssue::Language,
             &format!("procedural macros cannot be expanded to {}", kind),
@@ -1109,7 +1109,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
                        -> Option<ast::Attribute> {
         let attr = attrs.iter()
                         .position(|a| {
-                            if a.path == "derive" {
+                            if a.path == sym::derive {
                                 *after_derive = true;
                             }
                             !attr::is_known(a) && !is_builtin_attr(a)
@@ -1117,8 +1117,8 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
                         .map(|i| attrs.remove(i));
         if let Some(attr) = &attr {
             if !self.cx.ecfg.enable_custom_inner_attributes() &&
-               attr.style == ast::AttrStyle::Inner && attr.path != "test" {
-                emit_feature_err(&self.cx.parse_sess, "custom_inner_attributes",
+               attr.style == ast::AttrStyle::Inner && attr.path != sym::test {
+                emit_feature_err(&self.cx.parse_sess, sym::custom_inner_attributes,
                                  attr.span, GateIssue::Language,
                                  "non-builtin inner attributes are unstable");
             }
@@ -1167,7 +1167,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
             self.check_attribute_inner(attr, features);
 
             // macros are expanded before any lint passes so this warning has to be hardcoded
-            if attr.path == "derive" {
+            if attr.path == sym::derive {
                 self.cx.struct_span_warn(attr.span, "`#[derive]` does nothing on macro invocations")
                     .note("this may become a hard error in a future release")
                     .emit();
@@ -1352,7 +1352,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
                 let inline_module = item.span.contains(inner) || inner.is_dummy();
 
                 if inline_module {
-                    if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") {
+                    if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, sym::path) {
                         self.cx.current_expansion.directory_ownership =
                             DirectoryOwnership::Owned { relative: None };
                         module.directory.push(&*path.as_str());
@@ -1485,19 +1485,19 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
     fn visit_attribute(&mut self, at: &mut ast::Attribute) {
         // turn `#[doc(include="filename")]` attributes into `#[doc(include(file="filename",
         // contents="file contents")]` attributes
-        if !at.check_name("doc") {
+        if !at.check_name(sym::doc) {
             return noop_visit_attribute(at, self);
         }
 
         if let Some(list) = at.meta_item_list() {
-            if !list.iter().any(|it| it.check_name("include")) {
+            if !list.iter().any(|it| it.check_name(sym::include)) {
                 return noop_visit_attribute(at, self);
             }
 
             let mut items = vec![];
 
             for mut it in list {
-                if !it.check_name("include") {
+                if !it.check_name(sym::include) {
                     items.push({ noop_visit_meta_list_item(&mut it, self); it });
                     continue;
                 }
@@ -1522,19 +1522,19 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
                             let include_info = vec![
                                 ast::NestedMetaItem::MetaItem(
                                     attr::mk_name_value_item_str(
-                                        Ident::from_str("file"),
+                                        Ident::with_empty_ctxt(sym::file),
                                         dummy_spanned(file),
                                     ),
                                 ),
                                 ast::NestedMetaItem::MetaItem(
                                     attr::mk_name_value_item_str(
-                                        Ident::from_str("contents"),
+                                        Ident::with_empty_ctxt(sym::contents),
                                         dummy_spanned(src_interned),
                                     ),
                                 ),
                             ];
 
-                            let include_ident = Ident::from_str("include");
+                            let include_ident = Ident::with_empty_ctxt(sym::include);
                             let item = attr::mk_list_item(DUMMY_SP, include_ident, include_info);
                             items.push(ast::NestedMetaItem::MetaItem(item));
                         }
@@ -1600,7 +1600,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
                 }
             }
 
-            let meta = attr::mk_list_item(DUMMY_SP, Ident::from_str("doc"), items);
+            let meta = attr::mk_list_item(DUMMY_SP, Ident::with_empty_ctxt(sym::doc), items);
             match at.style {
                 ast::AttrStyle::Inner => *at = attr::mk_spanned_attr_inner(at.span, at.id, meta),
                 ast::AttrStyle::Outer => *at = attr::mk_spanned_attr_outer(at.span, at.id, meta),
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 549de1628eb..e1cb90d9e71 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -4,7 +4,7 @@ use crate::ext::build::AstBuilder;
 use crate::parse::{self, token, DirectoryOwnership};
 use crate::print::pprust;
 use crate::ptr::P;
-use crate::symbol::Symbol;
+use crate::symbol::{Symbol, sym};
 use crate::tokenstream;
 
 use smallvec::SmallVec;
@@ -44,7 +44,7 @@ pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTr
 /* __rust_unstable_column!(): expands to the current column number */
 pub fn expand_column_gated(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                   -> Box<dyn base::MacResult+'static> {
-    if sp.allows_unstable("__rust_unstable_column") {
+    if sp.allows_unstable(sym::__rust_unstable_column) {
         expand_column(cx, sp, tts)
     } else {
         cx.span_fatal(sp, "the __rust_unstable_column macro is unstable");
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index a53cc2fe661..672b7b42855 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -13,7 +13,7 @@ use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::Parser;
 use crate::parse::token::{self, NtTT};
 use crate::parse::token::Token::*;
-use crate::symbol::Symbol;
+use crate::symbol::{Symbol, keywords, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
 
 use errors::FatalError;
@@ -252,8 +252,8 @@ pub fn compile(
     def: &ast::Item,
     edition: Edition
 ) -> SyntaxExtension {
-    let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
-    let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
+    let lhs_nm = ast::Ident::from_str("lhs").gensym();
+    let rhs_nm = ast::Ident::from_str("rhs").gensym();
 
     // Parse the macro_rules! invocation
     let body = match def.node {
@@ -376,7 +376,7 @@ pub fn compile(
     });
 
     if body.legacy {
-        let allow_internal_unstable = attr::find_by_name(&def.attrs, "allow_internal_unstable")
+        let allow_internal_unstable = attr::find_by_name(&def.attrs, sym::allow_internal_unstable)
             .map(|attr| attr
                 .meta_item_list()
                 .map(|list| list.iter()
@@ -399,11 +399,11 @@ pub fn compile(
                     vec![Symbol::intern("allow_internal_unstable_backcompat_hack")].into()
                 })
             );
-        let allow_internal_unsafe = attr::contains_name(&def.attrs, "allow_internal_unsafe");
+        let allow_internal_unsafe = attr::contains_name(&def.attrs, sym::allow_internal_unsafe);
         let mut local_inner_macros = false;
-        if let Some(macro_export) = attr::find_by_name(&def.attrs, "macro_export") {
+        if let Some(macro_export) = attr::find_by_name(&def.attrs, sym::macro_export) {
             if let Some(l) = macro_export.meta_item_list() {
-                local_inner_macros = attr::list_contains_name(&l, "local_inner_macros");
+                local_inner_macros = attr::list_contains_name(&l, sym::local_inner_macros);
             }
         }
 
@@ -426,7 +426,7 @@ pub fn compile(
             edition,
         }
     } else {
-        let is_transparent = attr::contains_name(&def.attrs, "rustc_transparent_macro");
+        let is_transparent = attr::contains_name(&def.attrs, sym::rustc_transparent_macro);
 
         SyntaxExtension::DeclMacro {
             expander,
@@ -467,7 +467,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
             TokenTree::Sequence(span, ref seq) => {
                 if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
                     match *seq_tt {
-                        TokenTree::MetaVarDecl(_, _, id) => id.name == "vis",
+                        TokenTree::MetaVarDecl(_, _, id) => id.name == sym::vis,
                         TokenTree::Sequence(_, ref sub_seq) =>
                             sub_seq.op == quoted::KleeneOp::ZeroOrMore
                             || sub_seq.op == quoted::KleeneOp::ZeroOrOne,
@@ -1046,7 +1046,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                 match *tok {
                     TokenTree::Token(_, ref tok) => match *tok {
                         FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == "if" || i.name == "in" => IsInFollow::Yes,
+                        Ident(i, false) if i.name == keywords::If.name() ||
+                                           i.name == keywords::In.name() => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
                     _ => IsInFollow::No(tokens),
@@ -1063,10 +1064,12 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                         OpenDelim(token::DelimToken::Bracket) |
                         Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
                         BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == "as" || i.name == "where" => IsInFollow::Yes,
+                        Ident(i, false) if i.name == keywords::As.name() ||
+                                           i.name == keywords::Where.name() => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
-                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => IsInFollow::Yes,
+                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block =>
+                        IsInFollow::Yes,
                     _ => IsInFollow::No(tokens),
                 }
             },
@@ -1089,16 +1092,18 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                 match *tok {
                     TokenTree::Token(_, ref tok) => match *tok {
                         Comma => IsInFollow::Yes,
-                        Ident(i, is_raw) if is_raw || i.name != "priv" => IsInFollow::Yes,
+                        Ident(i, is_raw) if is_raw || i.name != keywords::Priv.name() =>
+                            IsInFollow::Yes,
                         ref tok => if tok.can_begin_type() {
                             IsInFollow::Yes
                         } else {
                             IsInFollow::No(tokens)
                         }
                     },
-                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident"
-                                                       || frag.name == "ty"
-                                                       || frag.name == "path" => IsInFollow::Yes,
+                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::ident
+                                                       || frag.name == sym::ty
+                                                       || frag.name == sym::path =>
+                        IsInFollow::Yes,
                     _ => IsInFollow::No(tokens),
                 }
             },
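
Gensym'd names are now produced by gensym-ing an `Ident` (`Ident::from_str("lhs").gensym()`) instead of gensym-ing a `Symbol`. A sketch of the idiom used for the internal `lhs`/`rhs` matcher names (assuming libsyntax crate context):

```rust
use crate::ast;

// Create a fresh, gensym'd identifier the way compile() now builds the
// internal `lhs` metavariable name.
fn fresh_lhs() -> ast::Ident {
    ast::Ident::from_str("lhs").gensym()
}
```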
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 0cefcf1ce03..e3586c1854c 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -170,10 +170,9 @@ pub fn transcribe(
                     }
 
                     LockstepIterSize::Contradiction(ref msg) => {
-                        // FIXME: this should be impossible. I (mark-i-m) believe it would
-                        // represent a bug in the macro_parser.
-                        // FIXME #2887 blame macro invoker instead
-                        cx.span_fatal(seq.span(), &msg[..]);
+                        // This should never happen because the macro parser should generate
+                        // properly-sized matches for all meta-vars.
+                        cx.span_bug(seq.span(), &msg[..]);
                     }
 
                     LockstepIterSize::Constraint(len, _) => {
@@ -188,14 +187,13 @@ pub fn transcribe(
                         // Is the repetition empty?
                         if len == 0 {
                             if seq.op == quoted::KleeneOp::OneOrMore {
-                                // FIXME: this should be impossible because we check for this in
-                                // macro_parser.rs
-                                // FIXME #2887 blame invoker
-                                cx.span_fatal(sp.entire(), "this must repeat at least once");
+                                // This should be impossible because the macro parser would not
+                                // match the given macro arm.
+                                cx.span_bug(sp.entire(), "this must repeat at least once");
                             }
                         } else {
                             // 0 is the initial counter (we have done 0 repretitions so far). `len`
-                            //   is the total number of reptitions we should generate.
+                            // is the total number of reptitions we should generate.
                             repeats.push((0, len));
 
                             // The first time we encounter the sequence we push it to the stack. It
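
(For context: `span_fatal` aborts with a user-facing error, whereas `span_bug` reports an
internal compiler error, the right choice for states that an earlier pass is supposed to
make unreachable, as in the two hunks above. A rough, self-contained analogy in plain Rust,
with hypothetical names, not rustc's actual API:)

    enum RepetitionCheck {
        Ok(usize),
        // Sizes of the matched metavariables disagree.
        Contradiction(String),
    }

    fn repetition_len(check: RepetitionCheck) -> usize {
        match check {
            RepetitionCheck::Ok(len) => len,
            // An upstream pass is expected to rule this case out, so reaching it is
            // a bug in the tool itself rather than a mistake in the user's input:
            // report it as an internal error (the analogue of `span_bug`) instead of
            // a normal user-facing diagnostic (the analogue of `span_fatal`).
            RepetitionCheck::Contradiction(msg) => panic!("internal error: {}", msg),
        }
    }

    fn main() {
        assert_eq!(repetition_len(RepetitionCheck::Ok(3)), 3);
    }
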
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index c3bad3aba18..5b1a9bb739f 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -22,13 +22,13 @@ use crate::source_map::Spanned;
 use crate::edition::{ALL_EDITIONS, Edition};
 use crate::visit::{self, FnKind, Visitor};
 use crate::parse::{token, ParseSess};
-use crate::symbol::Symbol;
+use crate::symbol::{Symbol, keywords, sym};
 use crate::tokenstream::TokenTree;
 
 use errors::{DiagnosticBuilder, Handler};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_target::spec::abi::Abi;
-use syntax_pos::{Span, DUMMY_SP, symbols};
+use syntax_pos::{Span, DUMMY_SP};
 use log::debug;
 use lazy_static::lazy_static;
 
@@ -48,8 +48,8 @@ macro_rules! declare_features {
         /// Represents active features that are currently being implemented or
         /// currently being considered for addition/removal.
         const ACTIVE_FEATURES:
-            &[(&str, &str, Option<u32>, Option<Edition>, fn(&mut Features, Span))] =
-            &[$((stringify!($feature), $ver, $issue, $edition, set!($feature))),+];
+            &[(Symbol, &str, Option<u32>, Option<Edition>, fn(&mut Features, Span))] =
+            &[$((sym::$feature, $ver, $issue, $edition, set!($feature))),+];
 
         /// A set of features to be used by later passes.
         #[derive(Clone)]
@@ -80,22 +80,22 @@ macro_rules! declare_features {
 
     ($((removed, $feature: ident, $ver: expr, $issue: expr, None, $reason: expr),)+) => {
         /// Represents unstable features which have since been removed (they were once Active)
-        const REMOVED_FEATURES: &[(&str, &str, Option<u32>, Option<&str>)] = &[
-            $((stringify!($feature), $ver, $issue, $reason)),+
+        const REMOVED_FEATURES: &[(Symbol, &str, Option<u32>, Option<&str>)] = &[
+            $((sym::$feature, $ver, $issue, $reason)),+
         ];
     };
 
     ($((stable_removed, $feature: ident, $ver: expr, $issue: expr, None),)+) => {
         /// Represents stable features which have since been removed (they were once Accepted)
-        const STABLE_REMOVED_FEATURES: &[(&str, &str, Option<u32>, Option<&str>)] = &[
-            $((stringify!($feature), $ver, $issue, None)),+
+        const STABLE_REMOVED_FEATURES: &[(Symbol, &str, Option<u32>, Option<&str>)] = &[
+            $((sym::$feature, $ver, $issue, None)),+
         ];
     };
 
     ($((accepted, $feature: ident, $ver: expr, $issue: expr, None),)+) => {
         /// Those language features have since been Accepted (they were once Active)
-        const ACCEPTED_FEATURES: &[(&str, &str, Option<u32>, Option<&str>)] = &[
-            $((stringify!($feature), $ver, $issue, None)),+
+        const ACCEPTED_FEATURES: &[(Symbol, &str, Option<u32>, Option<&str>)] = &[
+            $((sym::$feature, $ver, $issue, None)),+
         ];
     }
 }
@@ -562,9 +562,10 @@ declare_features! (
 // Some features are known to be incomplete and using them is likely to have
 // unanticipated results, such as compiler crashes. We warn the user about these
 // to alert them.
-const INCOMPLETE_FEATURES: &[&str] = &[
-    "generic_associated_types",
-    "const_generics"
+const INCOMPLETE_FEATURES: &[Symbol] = &[
+    sym::impl_trait_in_bindings,
+    sym::generic_associated_types,
+    sym::const_generics
 ];
 
 declare_features! (
@@ -860,7 +861,7 @@ pub enum AttributeType {
 pub enum AttributeGate {
     /// Is gated by a given feature gate, reason
     /// and function to check if enabled
-    Gated(Stability, &'static str, &'static str, fn(&Features) -> bool),
+    Gated(Stability, Symbol, &'static str, fn(&Features) -> bool),
 
     /// Ungated attribute, can be used on all release channels
     Ungated,
@@ -962,232 +963,232 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
     // Normal attributes
 
     (
-        symbols::warn,
+        sym::warn,
         Normal,
         template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#),
         Ungated
     ),
     (
-        symbols::allow,
+        sym::allow,
         Normal,
         template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#),
         Ungated
     ),
     (
-        symbols::forbid,
+        sym::forbid,
         Normal,
         template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#),
         Ungated
     ),
     (
-        symbols::deny,
+        sym::deny,
         Normal,
         template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#),
         Ungated
     ),
 
-    (symbols::macro_use, Normal, template!(Word, List: "name1, name2, ..."), Ungated),
-    (symbols::macro_export, Normal, template!(Word, List: "local_inner_macros"), Ungated),
-    (symbols::plugin_registrar, Normal, template!(Word), Ungated),
-
-    (symbols::cfg, Normal, template!(List: "predicate"), Ungated),
-    (symbols::cfg_attr, Normal, template!(List: "predicate, attr1, attr2, ..."), Ungated),
-    (symbols::main, Normal, template!(Word), Ungated),
-    (symbols::start, Normal, template!(Word), Ungated),
-    (symbols::repr, Normal, template!(List: "C, packed, ..."), Ungated),
-    (symbols::path, Normal, template!(NameValueStr: "file"), Ungated),
-    (symbols::automatically_derived, Normal, template!(Word), Ungated),
-    (symbols::no_mangle, Normal, template!(Word), Ungated),
-    (symbols::no_link, Normal, template!(Word), Ungated),
-    (symbols::derive, Normal, template!(List: "Trait1, Trait2, ..."), Ungated),
+    (sym::macro_use, Normal, template!(Word, List: "name1, name2, ..."), Ungated),
+    (sym::macro_export, Normal, template!(Word, List: "local_inner_macros"), Ungated),
+    (sym::plugin_registrar, Normal, template!(Word), Ungated),
+
+    (sym::cfg, Normal, template!(List: "predicate"), Ungated),
+    (sym::cfg_attr, Normal, template!(List: "predicate, attr1, attr2, ..."), Ungated),
+    (sym::main, Normal, template!(Word), Ungated),
+    (sym::start, Normal, template!(Word), Ungated),
+    (sym::repr, Normal, template!(List: "C, packed, ..."), Ungated),
+    (sym::path, Normal, template!(NameValueStr: "file"), Ungated),
+    (sym::automatically_derived, Normal, template!(Word), Ungated),
+    (sym::no_mangle, Whitelisted, template!(Word), Ungated),
+    (sym::no_link, Normal, template!(Word), Ungated),
+    (sym::derive, Normal, template!(List: "Trait1, Trait2, ..."), Ungated),
     (
-        symbols::should_panic,
+        sym::should_panic,
         Normal,
         template!(Word, List: r#"expected = "reason"#, NameValueStr: "reason"),
         Ungated
     ),
-    (symbols::ignore, Normal, template!(Word, NameValueStr: "reason"), Ungated),
-    (symbols::no_implicit_prelude, Normal, template!(Word), Ungated),
-    (symbols::reexport_test_harness_main, Normal, template!(NameValueStr: "name"), Ungated),
-    (symbols::link_args, Normal, template!(NameValueStr: "args"), Gated(Stability::Unstable,
-                                "link_args",
+    (sym::ignore, Normal, template!(Word, NameValueStr: "reason"), Ungated),
+    (sym::no_implicit_prelude, Normal, template!(Word), Ungated),
+    (sym::reexport_test_harness_main, Normal, template!(NameValueStr: "name"), Ungated),
+    (sym::link_args, Normal, template!(NameValueStr: "args"), Gated(Stability::Unstable,
+                                sym::link_args,
                                 "the `link_args` attribute is experimental and not \
                                 portable across platforms, it is recommended to \
                                 use `#[link(name = \"foo\")] instead",
                                 cfg_fn!(link_args))),
-    (symbols::macro_escape, Normal, template!(Word), Ungated),
+    (sym::macro_escape, Normal, template!(Word), Ungated),
 
     // RFC #1445.
-    (symbols::structural_match, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                            "structural_match",
+    (sym::structural_match, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                            sym::structural_match,
                                             "the semantics of constant patterns is \
                                             not yet settled",
                                             cfg_fn!(structural_match))),
 
     // RFC #2008
-    (symbols::non_exhaustive, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                        "non_exhaustive",
+    (sym::non_exhaustive, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                        sym::non_exhaustive,
                                         "non exhaustive is an experimental feature",
                                         cfg_fn!(non_exhaustive))),
 
     // RFC #1268
-    (symbols::marker, Normal, template!(Word), Gated(Stability::Unstable,
-                            "marker_trait_attr",
+    (sym::marker, Normal, template!(Word), Gated(Stability::Unstable,
+                            sym::marker_trait_attr,
                             "marker traits is an experimental feature",
                             cfg_fn!(marker_trait_attr))),
 
-    (symbols::plugin, CrateLevel, template!(List: "name|name(args)"), Gated(Stability::Unstable,
-                                "plugin",
+    (sym::plugin, CrateLevel, template!(List: "name|name(args)"), Gated(Stability::Unstable,
+                                sym::plugin,
                                 "compiler plugins are experimental \
                                 and possibly buggy",
                                 cfg_fn!(plugin))),
 
-    (symbols::no_std, CrateLevel, template!(Word), Ungated),
-    (symbols::no_core, CrateLevel, template!(Word), Gated(Stability::Unstable,
-                                "no_core",
+    (sym::no_std, CrateLevel, template!(Word), Ungated),
+    (sym::no_core, CrateLevel, template!(Word), Gated(Stability::Unstable,
+                                sym::no_core,
                                 "no_core is experimental",
                                 cfg_fn!(no_core))),
-    (symbols::lang, Normal, template!(NameValueStr: "name"), Gated(Stability::Unstable,
-                        "lang_items",
+    (sym::lang, Normal, template!(NameValueStr: "name"), Gated(Stability::Unstable,
+                        sym::lang_items,
                         "language items are subject to change",
                         cfg_fn!(lang_items))),
-    (symbols::linkage, Whitelisted, template!(NameValueStr: "external|internal|..."),
+    (sym::linkage, Whitelisted, template!(NameValueStr: "external|internal|..."),
                                 Gated(Stability::Unstable,
-                                "linkage",
+                                sym::linkage,
                                 "the `linkage` attribute is experimental \
                                     and not portable across platforms",
                                 cfg_fn!(linkage))),
-    (symbols::thread_local, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                        "thread_local",
+    (sym::thread_local, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                        sym::thread_local,
                                         "`#[thread_local]` is an experimental feature, and does \
                                          not currently handle destructors",
                                         cfg_fn!(thread_local))),
 
-    (symbols::rustc_on_unimplemented, Whitelisted, template!(List:
+    (sym::rustc_on_unimplemented, Whitelisted, template!(List:
                         r#"/*opt*/ message = "...", /*opt*/ label = "...", /*opt*/ note = "...""#,
                         NameValueStr: "message"),
                                             Gated(Stability::Unstable,
-                                            "on_unimplemented",
+                                            sym::on_unimplemented,
                                             "the `#[rustc_on_unimplemented]` attribute \
                                             is an experimental feature",
                                             cfg_fn!(on_unimplemented))),
-    (symbols::rustc_const_unstable, Normal, template!(List: r#"feature = "name""#),
+    (sym::rustc_const_unstable, Normal, template!(List: r#"feature = "name""#),
                                             Gated(Stability::Unstable,
-                                            "rustc_const_unstable",
+                                            sym::rustc_const_unstable,
                                             "the `#[rustc_const_unstable]` attribute \
                                             is an internal feature",
                                             cfg_fn!(rustc_const_unstable))),
-    (symbols::global_allocator, Normal, template!(Word), Ungated),
-    (symbols::default_lib_allocator, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                            "allocator_internals",
+    (sym::global_allocator, Normal, template!(Word), Ungated),
+    (sym::default_lib_allocator, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                            sym::allocator_internals,
                                             "the `#[default_lib_allocator]` \
                                             attribute is an experimental feature",
                                             cfg_fn!(allocator_internals))),
-    (symbols::needs_allocator, Normal, template!(Word), Gated(Stability::Unstable,
-                                    "allocator_internals",
+    (sym::needs_allocator, Normal, template!(Word), Gated(Stability::Unstable,
+                                    sym::allocator_internals,
                                     "the `#[needs_allocator]` \
                                     attribute is an experimental \
                                     feature",
                                     cfg_fn!(allocator_internals))),
-    (symbols::panic_runtime, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                        "panic_runtime",
+    (sym::panic_runtime, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                        sym::panic_runtime,
                                         "the `#[panic_runtime]` attribute is \
                                         an experimental feature",
                                         cfg_fn!(panic_runtime))),
-    (symbols::needs_panic_runtime, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                            "needs_panic_runtime",
+    (sym::needs_panic_runtime, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                            sym::needs_panic_runtime,
                                             "the `#[needs_panic_runtime]` \
                                                 attribute is an experimental \
                                                 feature",
                                             cfg_fn!(needs_panic_runtime))),
-    (symbols::rustc_outlives, Normal, template!(Word), Gated(Stability::Unstable,
-                                    "rustc_attrs",
+    (sym::rustc_outlives, Normal, template!(Word), Gated(Stability::Unstable,
+                                    sym::rustc_attrs,
                                     "the `#[rustc_outlives]` attribute \
                                     is just used for rustc unit tests \
                                     and will never be stable",
                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_variance, Normal, template!(Word), Gated(Stability::Unstable,
-                                    "rustc_attrs",
+    (sym::rustc_variance, Normal, template!(Word), Gated(Stability::Unstable,
+                                    sym::rustc_attrs,
                                     "the `#[rustc_variance]` attribute \
                                     is just used for rustc unit tests \
                                     and will never be stable",
                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_layout, Normal, template!(List: "field1, field2, ..."),
+    (sym::rustc_layout, Normal, template!(List: "field1, field2, ..."),
     Gated(Stability::Unstable,
-        "rustc_attrs",
+        sym::rustc_attrs,
         "the `#[rustc_layout]` attribute \
             is just used for rustc unit tests \
             and will never be stable",
         cfg_fn!(rustc_attrs))),
-    (symbols::rustc_layout_scalar_valid_range_start, Whitelisted, template!(List: "value"),
+    (sym::rustc_layout_scalar_valid_range_start, Whitelisted, template!(List: "value"),
     Gated(Stability::Unstable,
-        "rustc_attrs",
+        sym::rustc_attrs,
         "the `#[rustc_layout_scalar_valid_range_start]` attribute \
             is just used to enable niche optimizations in libcore \
             and will never be stable",
         cfg_fn!(rustc_attrs))),
-    (symbols::rustc_layout_scalar_valid_range_end, Whitelisted, template!(List: "value"),
+    (sym::rustc_layout_scalar_valid_range_end, Whitelisted, template!(List: "value"),
     Gated(Stability::Unstable,
-        "rustc_attrs",
+        sym::rustc_attrs,
         "the `#[rustc_layout_scalar_valid_range_end]` attribute \
             is just used to enable niche optimizations in libcore \
             and will never be stable",
         cfg_fn!(rustc_attrs))),
-    (symbols::rustc_regions, Normal, template!(Word), Gated(Stability::Unstable,
-                                    "rustc_attrs",
+    (sym::rustc_regions, Normal, template!(Word), Gated(Stability::Unstable,
+                                    sym::rustc_attrs,
                                     "the `#[rustc_regions]` attribute \
                                     is just used for rustc unit tests \
                                     and will never be stable",
                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_error, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                    "rustc_attrs",
+    (sym::rustc_error, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                    sym::rustc_attrs,
                                     "the `#[rustc_error]` attribute \
                                         is just used for rustc unit tests \
                                         and will never be stable",
                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_dump_user_substs, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                    "rustc_attrs",
+    (sym::rustc_dump_user_substs, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                    sym::rustc_attrs,
                                     "this attribute \
                                         is just used for rustc unit tests \
                                         and will never be stable",
                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_if_this_changed, Whitelisted, template!(Word, List: "DepNode"),
+    (sym::rustc_if_this_changed, Whitelisted, template!(Word, List: "DepNode"),
                                                 Gated(Stability::Unstable,
-                                                "rustc_attrs",
+                                                sym::rustc_attrs,
                                                 "the `#[rustc_if_this_changed]` attribute \
                                                 is just used for rustc unit tests \
                                                 and will never be stable",
                                                 cfg_fn!(rustc_attrs))),
-    (symbols::rustc_then_this_would_need, Whitelisted, template!(List: "DepNode"),
+    (sym::rustc_then_this_would_need, Whitelisted, template!(List: "DepNode"),
                                                     Gated(Stability::Unstable,
-                                                    "rustc_attrs",
+                                                    sym::rustc_attrs,
                                                     "the `#[rustc_if_this_changed]` attribute \
                                                     is just used for rustc unit tests \
                                                     and will never be stable",
                                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_dirty, Whitelisted, template!(List: r#"cfg = "...", /*opt*/ label = "...",
+    (sym::rustc_dirty, Whitelisted, template!(List: r#"cfg = "...", /*opt*/ label = "...",
                                                     /*opt*/ except = "...""#),
                                     Gated(Stability::Unstable,
-                                    "rustc_attrs",
+                                    sym::rustc_attrs,
                                     "the `#[rustc_dirty]` attribute \
                                         is just used for rustc unit tests \
                                         and will never be stable",
                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_clean, Whitelisted, template!(List: r#"cfg = "...", /*opt*/ label = "...",
+    (sym::rustc_clean, Whitelisted, template!(List: r#"cfg = "...", /*opt*/ label = "...",
                                                     /*opt*/ except = "...""#),
                                     Gated(Stability::Unstable,
-                                    "rustc_attrs",
+                                    sym::rustc_attrs,
                                     "the `#[rustc_clean]` attribute \
                                         is just used for rustc unit tests \
                                         and will never be stable",
                                     cfg_fn!(rustc_attrs))),
     (
-        symbols::rustc_partition_reused,
+        sym::rustc_partition_reused,
         Whitelisted,
         template!(List: r#"cfg = "...", module = "...""#),
         Gated(
             Stability::Unstable,
-            "rustc_attrs",
+            sym::rustc_attrs,
             "this attribute \
             is just used for rustc unit tests \
             and will never be stable",
@@ -1195,53 +1196,53 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
         )
     ),
     (
-        symbols::rustc_partition_codegened,
+        sym::rustc_partition_codegened,
         Whitelisted,
         template!(List: r#"cfg = "...", module = "...""#),
         Gated(
             Stability::Unstable,
-            "rustc_attrs",
+            sym::rustc_attrs,
             "this attribute \
             is just used for rustc unit tests \
             and will never be stable",
             cfg_fn!(rustc_attrs),
         )
     ),
-    (symbols::rustc_expected_cgu_reuse, Whitelisted, template!(List: r#"cfg = "...", module = "...",
+    (sym::rustc_expected_cgu_reuse, Whitelisted, template!(List: r#"cfg = "...", module = "...",
                                                             kind = "...""#),
                                                     Gated(Stability::Unstable,
-                                                    "rustc_attrs",
+                                                    sym::rustc_attrs,
                                                     "this attribute \
                                                     is just used for rustc unit tests \
                                                     and will never be stable",
                                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_synthetic, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                                    "rustc_attrs",
+    (sym::rustc_synthetic, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                                    sym::rustc_attrs,
                                                     "this attribute \
                                                     is just used for rustc unit tests \
                                                     and will never be stable",
                                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_symbol_name, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                            "rustc_attrs",
+    (sym::rustc_symbol_name, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                            sym::rustc_attrs,
                                             "internal rustc attributes will never be stable",
                                             cfg_fn!(rustc_attrs))),
-    (symbols::rustc_def_path, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                        "rustc_attrs",
+    (sym::rustc_def_path, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                        sym::rustc_attrs,
                                         "internal rustc attributes will never be stable",
                                         cfg_fn!(rustc_attrs))),
-    (symbols::rustc_mir, Whitelisted, template!(List: "arg1, arg2, ..."), Gated(Stability::Unstable,
-                                    "rustc_attrs",
+    (sym::rustc_mir, Whitelisted, template!(List: "arg1, arg2, ..."), Gated(Stability::Unstable,
+                                    sym::rustc_attrs,
                                     "the `#[rustc_mir]` attribute \
                                     is just used for rustc unit tests \
                                     and will never be stable",
                                     cfg_fn!(rustc_attrs))),
     (
-        symbols::rustc_inherit_overflow_checks,
+        sym::rustc_inherit_overflow_checks,
         Whitelisted,
         template!(Word),
         Gated(
             Stability::Unstable,
-            "rustc_attrs",
+            sym::rustc_attrs,
             "the `#[rustc_inherit_overflow_checks]` \
             attribute is just used to control \
             overflow checking behavior of several \
@@ -1251,71 +1252,71 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
         )
     ),
 
-    (symbols::rustc_dump_program_clauses, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                                    "rustc_attrs",
+    (sym::rustc_dump_program_clauses, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                                    sym::rustc_attrs,
                                                     "the `#[rustc_dump_program_clauses]` \
                                                     attribute is just used for rustc unit \
                                                     tests and will never be stable",
                                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_test_marker, Normal, template!(Word), Gated(Stability::Unstable,
-                                    "rustc_attrs",
+    (sym::rustc_test_marker, Normal, template!(Word), Gated(Stability::Unstable,
+                                    sym::rustc_attrs,
                                     "the `#[rustc_test_marker]` attribute \
                                     is used internally to track tests",
                                     cfg_fn!(rustc_attrs))),
-    (symbols::rustc_transparent_macro, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                                "rustc_attrs",
+    (sym::rustc_transparent_macro, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                                sym::rustc_attrs,
                                                 "used internally for testing macro hygiene",
                                                     cfg_fn!(rustc_attrs))),
-    (symbols::compiler_builtins, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                            "compiler_builtins",
+    (sym::compiler_builtins, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                            sym::compiler_builtins,
                                             "the `#[compiler_builtins]` attribute is used to \
                                             identify the `compiler_builtins` crate which \
                                             contains compiler-rt intrinsics and will never be \
                                             stable",
                                         cfg_fn!(compiler_builtins))),
-    (symbols::sanitizer_runtime, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                            "sanitizer_runtime",
+    (sym::sanitizer_runtime, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                            sym::sanitizer_runtime,
                                             "the `#[sanitizer_runtime]` attribute is used to \
                                             identify crates that contain the runtime of a \
                                             sanitizer and will never be stable",
                                             cfg_fn!(sanitizer_runtime))),
-    (symbols::profiler_runtime, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                            "profiler_runtime",
+    (sym::profiler_runtime, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                            sym::profiler_runtime,
                                             "the `#[profiler_runtime]` attribute is used to \
                                             identify the `profiler_builtins` crate which \
                                             contains the profiler runtime and will never be \
                                             stable",
                                             cfg_fn!(profiler_runtime))),
 
-    (symbols::allow_internal_unstable, Normal, template!(Word, List: "feat1, feat2, ..."),
+    (sym::allow_internal_unstable, Normal, template!(Word, List: "feat1, feat2, ..."),
                                             Gated(Stability::Unstable,
-                                            "allow_internal_unstable",
+                                            sym::allow_internal_unstable,
                                             EXPLAIN_ALLOW_INTERNAL_UNSTABLE,
                                             cfg_fn!(allow_internal_unstable))),
 
-    (symbols::allow_internal_unsafe, Normal, template!(Word), Gated(Stability::Unstable,
-                                            "allow_internal_unsafe",
+    (sym::allow_internal_unsafe, Normal, template!(Word), Gated(Stability::Unstable,
+                                            sym::allow_internal_unsafe,
                                             EXPLAIN_ALLOW_INTERNAL_UNSAFE,
                                             cfg_fn!(allow_internal_unsafe))),
 
-    (symbols::fundamental, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                    "fundamental",
+    (sym::fundamental, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                    sym::fundamental,
                                     "the `#[fundamental]` attribute \
                                         is an experimental feature",
                                     cfg_fn!(fundamental))),
 
-    (symbols::proc_macro_derive, Normal, template!(List: "TraitName, \
+    (sym::proc_macro_derive, Normal, template!(List: "TraitName, \
                                                 /*opt*/ attributes(name1, name2, ...)"),
                                     Ungated),
 
-    (symbols::rustc_copy_clone_marker, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                                "rustc_attrs",
+    (sym::rustc_copy_clone_marker, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                                sym::rustc_attrs,
                                                 "internal implementation detail",
                                                 cfg_fn!(rustc_attrs))),
 
     // FIXME: #14408 whitelist docs since rustdoc looks at them
     (
-        symbols::doc,
+        sym::doc,
         Whitelisted,
         template!(List: "hidden|inline|...", NameValueStr: "string"),
         Ungated
@@ -1323,94 +1324,94 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
 
     // FIXME: #14406 these are processed in codegen, which happens after the
     // lint pass
-    (symbols::cold, Whitelisted, template!(Word), Ungated),
-    (symbols::naked, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                "naked_functions",
+    (sym::cold, Whitelisted, template!(Word), Ungated),
+    (sym::naked, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                sym::naked_functions,
                                 "the `#[naked]` attribute \
                                 is an experimental feature",
                                 cfg_fn!(naked_functions))),
-    (symbols::ffi_returns_twice, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                "ffi_returns_twice",
+    (sym::ffi_returns_twice, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                sym::ffi_returns_twice,
                                 "the `#[ffi_returns_twice]` attribute \
                                 is an experimental feature",
                                 cfg_fn!(ffi_returns_twice))),
-    (symbols::target_feature, Whitelisted, template!(List: r#"enable = "name""#), Ungated),
-    (symbols::export_name, Whitelisted, template!(NameValueStr: "name"), Ungated),
-    (symbols::inline, Whitelisted, template!(Word, List: "always|never"), Ungated),
-    (symbols::link, Whitelisted, template!(List: r#"name = "...", /*opt*/ kind = "dylib|static|...",
+    (sym::target_feature, Whitelisted, template!(List: r#"enable = "name""#), Ungated),
+    (sym::export_name, Whitelisted, template!(NameValueStr: "name"), Ungated),
+    (sym::inline, Whitelisted, template!(Word, List: "always|never"), Ungated),
+    (sym::link, Whitelisted, template!(List: r#"name = "...", /*opt*/ kind = "dylib|static|...",
                                                /*opt*/ cfg = "...""#), Ungated),
-    (symbols::link_name, Whitelisted, template!(NameValueStr: "name"), Ungated),
-    (symbols::link_section, Whitelisted, template!(NameValueStr: "name"), Ungated),
-    (symbols::no_builtins, Whitelisted, template!(Word), Ungated),
-    (symbols::no_debug, Whitelisted, template!(Word), Gated(
+    (sym::link_name, Whitelisted, template!(NameValueStr: "name"), Ungated),
+    (sym::link_section, Whitelisted, template!(NameValueStr: "name"), Ungated),
+    (sym::no_builtins, Whitelisted, template!(Word), Ungated),
+    (sym::no_debug, Whitelisted, template!(Word), Gated(
         Stability::Deprecated("https://github.com/rust-lang/rust/issues/29721", None),
-        "no_debug",
+        sym::no_debug,
         "the `#[no_debug]` attribute was an experimental feature that has been \
         deprecated due to lack of demand",
         cfg_fn!(no_debug))),
     (
-        symbols::omit_gdb_pretty_printer_section,
+        sym::omit_gdb_pretty_printer_section,
         Whitelisted,
         template!(Word),
         Gated(
             Stability::Unstable,
-            "omit_gdb_pretty_printer_section",
+            sym::omit_gdb_pretty_printer_section,
             "the `#[omit_gdb_pretty_printer_section]` \
                 attribute is just used for the Rust test \
                 suite",
             cfg_fn!(omit_gdb_pretty_printer_section)
         )
     ),
-    (symbols::unsafe_destructor_blind_to_params,
+    (sym::unsafe_destructor_blind_to_params,
     Normal,
     template!(Word),
     Gated(Stability::Deprecated("https://github.com/rust-lang/rust/issues/34761",
                                 Some("replace this attribute with `#[may_dangle]`")),
-        "dropck_parametricity",
+        sym::dropck_parametricity,
         "unsafe_destructor_blind_to_params has been replaced by \
             may_dangle and will be removed in the future",
         cfg_fn!(dropck_parametricity))),
-    (symbols::may_dangle,
+    (sym::may_dangle,
     Normal,
     template!(Word),
     Gated(Stability::Unstable,
-        "dropck_eyepatch",
+        sym::dropck_eyepatch,
         "may_dangle has unstable semantics and may be removed in the future",
         cfg_fn!(dropck_eyepatch))),
-    (symbols::unwind, Whitelisted, template!(List: "allowed|aborts"), Gated(Stability::Unstable,
-                                "unwind_attributes",
+    (sym::unwind, Whitelisted, template!(List: "allowed|aborts"), Gated(Stability::Unstable,
+                                sym::unwind_attributes,
                                 "#[unwind] is experimental",
                                 cfg_fn!(unwind_attributes))),
-    (symbols::used, Whitelisted, template!(Word), Ungated),
+    (sym::used, Whitelisted, template!(Word), Ungated),
 
     // used in resolve
-    (symbols::prelude_import, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                        "prelude_import",
+    (sym::prelude_import, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                        sym::prelude_import,
                                         "`#[prelude_import]` is for use by rustc only",
                                         cfg_fn!(prelude_import))),
 
     // FIXME: #14407 these are only looked at on-demand so we can't
     // guarantee they'll have already been checked
     (
-        symbols::rustc_deprecated,
+        sym::rustc_deprecated,
         Whitelisted,
         template!(List: r#"since = "version", reason = "...""#),
         Ungated
     ),
-    (symbols::must_use, Whitelisted, template!(Word, NameValueStr: "reason"), Ungated),
+    (sym::must_use, Whitelisted, template!(Word, NameValueStr: "reason"), Ungated),
     (
-        symbols::stable,
+        sym::stable,
         Whitelisted,
         template!(List: r#"feature = "name", since = "version""#),
         Ungated
     ),
     (
-        symbols::unstable,
+        sym::unstable,
         Whitelisted,
         template!(List: r#"feature = "name", reason = "...", issue = "N""#),
         Ungated
     ),
-    (symbols::deprecated,
+    (sym::deprecated,
         Normal,
         template!(
             Word,
@@ -1420,70 +1421,71 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
         Ungated
     ),
 
-    (symbols::rustc_paren_sugar, Normal, template!(Word), Gated(Stability::Unstable,
-                                        "unboxed_closures",
+    (sym::rustc_paren_sugar, Normal, template!(Word), Gated(Stability::Unstable,
+                                        sym::unboxed_closures,
                                         "unboxed_closures are still evolving",
                                         cfg_fn!(unboxed_closures))),
 
-    (symbols::windows_subsystem, Whitelisted, template!(NameValueStr: "windows|console"), Ungated),
+    (sym::windows_subsystem, Whitelisted, template!(NameValueStr: "windows|console"), Ungated),
 
-    (symbols::proc_macro_attribute, Normal, template!(Word), Ungated),
-    (symbols::proc_macro, Normal, template!(Word), Ungated),
+    (sym::proc_macro_attribute, Normal, template!(Word), Ungated),
+    (sym::proc_macro, Normal, template!(Word), Ungated),
 
-    (symbols::rustc_proc_macro_decls, Normal, template!(Word), Gated(Stability::Unstable,
-                                            "rustc_attrs",
+    (sym::rustc_proc_macro_decls, Normal, template!(Word), Gated(Stability::Unstable,
+                                            sym::rustc_attrs,
                                             "used internally by rustc",
                                             cfg_fn!(rustc_attrs))),
 
-    (symbols::allow_fail, Normal, template!(Word), Gated(Stability::Unstable,
-                                "allow_fail",
+    (sym::allow_fail, Normal, template!(Word), Gated(Stability::Unstable,
+                                sym::allow_fail,
                                 "allow_fail attribute is currently unstable",
                                 cfg_fn!(allow_fail))),
 
-    (symbols::rustc_std_internal_symbol, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                    "rustc_attrs",
+    (sym::rustc_std_internal_symbol, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                    sym::rustc_attrs,
                                     "this is an internal attribute that will \
                                     never be stable",
                                     cfg_fn!(rustc_attrs))),
 
     // whitelists "identity-like" conversion methods to suggest on type mismatch
-    (symbols::rustc_conversion_suggestion, Whitelisted, template!(Word), Gated(Stability::Unstable,
-                                                    "rustc_attrs",
+    (sym::rustc_conversion_suggestion, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                                    sym::rustc_attrs,
                                                     "this is an internal attribute that will \
                                                         never be stable",
                                                     cfg_fn!(rustc_attrs))),
 
     (
-        symbols::rustc_args_required_const,
+        sym::rustc_args_required_const,
         Whitelisted,
         template!(List: "N"),
-        Gated(Stability::Unstable, "rustc_attrs", "never will be stable", cfg_fn!(rustc_attrs))
+        Gated(Stability::Unstable, sym::rustc_attrs, "never will be stable",
+           cfg_fn!(rustc_attrs))
     ),
     // RFC 2070
-    (symbols::panic_handler, Normal, template!(Word), Ungated),
+    (sym::panic_handler, Normal, template!(Word), Ungated),
 
-    (symbols::alloc_error_handler, Normal, template!(Word), Gated(Stability::Unstable,
-                        "alloc_error_handler",
+    (sym::alloc_error_handler, Normal, template!(Word), Gated(Stability::Unstable,
+                        sym::alloc_error_handler,
                         "#[alloc_error_handler] is an unstable feature",
                         cfg_fn!(alloc_error_handler))),
 
     // RFC 2412
-    (symbols::optimize, Whitelisted, template!(List: "size|speed"), Gated(Stability::Unstable,
-                            "optimize_attribute",
+    (sym::optimize, Whitelisted, template!(List: "size|speed"), Gated(Stability::Unstable,
+                            sym::optimize_attribute,
                             "#[optimize] attribute is an unstable feature",
                             cfg_fn!(optimize_attribute))),
 
     // Crate level attributes
-    (symbols::crate_name, CrateLevel, template!(NameValueStr: "name"), Ungated),
-    (symbols::crate_type, CrateLevel, template!(NameValueStr: "bin|lib|..."), Ungated),
-    (symbols::crate_id, CrateLevel, template!(NameValueStr: "ignored"), Ungated),
-    (symbols::feature, CrateLevel, template!(List: "name1, name1, ..."), Ungated),
-    (symbols::no_start, CrateLevel, template!(Word), Ungated),
-    (symbols::no_main, CrateLevel, template!(Word), Ungated),
-    (symbols::recursion_limit, CrateLevel, template!(NameValueStr: "N"), Ungated),
-    (symbols::type_length_limit, CrateLevel, template!(NameValueStr: "N"), Ungated),
-    (symbols::test_runner, CrateLevel, template!(List: "path"), Gated(Stability::Unstable,
-                    "custom_test_frameworks",
+    (sym::crate_name, CrateLevel, template!(NameValueStr: "name"), Ungated),
+    (sym::crate_type, CrateLevel, template!(NameValueStr: "bin|lib|..."), Ungated),
+    (sym::crate_id, CrateLevel, template!(NameValueStr: "ignored"), Ungated),
+    (sym::feature, CrateLevel, template!(List: "name1, name1, ..."), Ungated),
+    (sym::no_start, CrateLevel, template!(Word), Ungated),
+    (sym::no_main, CrateLevel, template!(Word), Ungated),
+    (sym::recursion_limit, CrateLevel, template!(NameValueStr: "N"), Ungated),
+    (sym::type_length_limit, CrateLevel, template!(NameValueStr: "N"), Ungated),
+    (sym::test_runner, CrateLevel, template!(List: "path"), Gated(Stability::Unstable,
+                    sym::custom_test_frameworks,
                     EXPLAIN_CUSTOM_TEST_FRAMEWORKS,
                     cfg_fn!(custom_test_frameworks))),
 ];
@@ -1503,11 +1505,11 @@ lazy_static! {
 }
 
 // cfg(...)'s that are feature gated
-const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
+const GATED_CFGS: &[(Symbol, Symbol, fn(&Features) -> bool)] = &[
     // (name in cfg, feature, function to check if the feature is enabled)
-    ("target_thread_local", "cfg_target_thread_local", cfg_fn!(cfg_target_thread_local)),
-    ("target_has_atomic", "cfg_target_has_atomic", cfg_fn!(cfg_target_has_atomic)),
-    ("rustdoc", "doc_cfg", cfg_fn!(doc_cfg)),
+    (sym::target_thread_local, sym::cfg_target_thread_local, cfg_fn!(cfg_target_thread_local)),
+    (sym::target_has_atomic, sym::cfg_target_has_atomic, cfg_fn!(cfg_target_has_atomic)),
+    (sym::rustdoc, sym::doc_cfg, cfg_fn!(doc_cfg)),
 ];
 
 #[derive(Debug)]
@@ -1540,7 +1542,7 @@ impl GatedCfg {
 struct Context<'a> {
     features: &'a Features,
     parse_sess: &'a ParseSess,
-    plugin_attributes: &'a [(String, AttributeType)],
+    plugin_attributes: &'a [(Symbol, AttributeType)],
 }
 
 macro_rules! gate_feature_fn {
@@ -1559,11 +1561,11 @@ macro_rules! gate_feature_fn {
 macro_rules! gate_feature {
     ($cx: expr, $feature: ident, $span: expr, $explain: expr) => {
         gate_feature_fn!($cx, |x:&Features| x.$feature, $span,
-                         stringify!($feature), $explain, GateStrength::Hard)
+                         sym::$feature, $explain, GateStrength::Hard)
     };
     ($cx: expr, $feature: ident, $span: expr, $explain: expr, $level: expr) => {
         gate_feature_fn!($cx, |x:&Features| x.$feature, $span,
-                         stringify!($feature), $explain, $level)
+                         sym::$feature, $explain, $level)
     };
 }
 
@@ -1582,9 +1584,9 @@ impl<'a> Context<'a> {
                         self, has_feature, attr.span, name, desc, GateStrength::Hard
                     );
                 }
-            } else if name == symbols::doc {
+            } else if name == sym::doc {
                 if let Some(content) = attr.meta_item_list() {
-                    if content.iter().any(|c| c.check_name(symbols::include)) {
+                    if content.iter().any(|c| c.check_name(sym::include)) {
                         gate_feature!(self, external_doc, attr.span,
                             "#[doc(include = \"...\")] is experimental"
                         );
@@ -1594,8 +1596,8 @@ impl<'a> Context<'a> {
             debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage);
             return;
         }
-        for &(ref n, ref ty) in self.plugin_attributes {
-            if attr.path == &**n {
+        for &(n, ty) in self.plugin_attributes {
+            if attr.path == n {
                 // Plugins can't gate attributes, so we don't check for it
                 // unlike the code above; we only use this loop to
                 // short-circuit to avoid the checks below.
@@ -1604,7 +1606,7 @@ impl<'a> Context<'a> {
             }
         }
         if !attr::is_known(attr) {
-            if attr.name_or_empty().starts_with("rustc_") {
+            if attr.name_or_empty().as_str().starts_with("rustc_") {
                 let msg = "unless otherwise specified, attributes with the prefix `rustc_` \
                            are reserved for internal compiler diagnostics";
                 gate_feature!(self, rustc_attrs, attr.span, msg);
@@ -1629,7 +1631,7 @@ pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features:
     );
 }
 
-fn find_lang_feature_issue(feature: &str) -> Option<u32> {
+fn find_lang_feature_issue(feature: Symbol) -> Option<u32> {
     if let Some(info) = ACTIVE_FEATURES.iter().find(|t| t.0 == feature) {
         let issue = info.2;
         // FIXME (#28244): enforce that active features have issue numbers
@@ -1661,7 +1663,7 @@ pub enum GateStrength {
 
 pub fn emit_feature_err(
     sess: &ParseSess,
-    feature: &str,
+    feature: Symbol,
     span: Span,
     issue: GateIssue,
     explain: &str,
@@ -1671,7 +1673,7 @@ pub fn emit_feature_err(
 
 pub fn feature_err<'a>(
     sess: &'a ParseSess,
-    feature: &str,
+    feature: Symbol,
     span: Span,
     issue: GateIssue,
     explain: &str,
@@ -1681,7 +1683,7 @@ pub fn feature_err<'a>(
 
 fn leveled_feature_err<'a>(
     sess: &'a ParseSess,
-    feature: &str,
+    feature: Symbol,
     span: Span,
     issue: GateIssue,
     explain: &str,
@@ -1769,13 +1771,13 @@ struct PostExpansionVisitor<'a> {
 macro_rules! gate_feature_post {
     ($cx: expr, $feature: ident, $span: expr, $explain: expr) => {{
         let (cx, span) = ($cx, $span);
-        if !span.allows_unstable(stringify!($feature)) {
+        if !span.allows_unstable(sym::$feature) {
             gate_feature!(cx.context, $feature, span, $explain)
         }
     }};
     ($cx: expr, $feature: ident, $span: expr, $explain: expr, $level: expr) => {{
         let (cx, span) = ($cx, $span);
-        if !span.allows_unstable(stringify!($feature)) {
+        if !span.allows_unstable(sym::$feature) {
             gate_feature!(cx.context, $feature, span, $explain, $level)
         }
     }}
@@ -1841,11 +1843,11 @@ impl<'a> PostExpansionVisitor<'a> {
                                template: AttributeTemplate) {
         // Some special attributes like `cfg` must be checked
         // before the generic check, so we skip them here.
-        let should_skip = |name| name == symbols::cfg;
+        let should_skip = |name| name == sym::cfg;
         // Some of previously accepted forms were used in practice,
         // report them as warnings for now.
-        let should_warn = |name| name == symbols::doc || name == symbols::ignore ||
-                                 name == symbols::inline || name == symbols::link;
+        let should_warn = |name| name == sym::doc || name == sym::ignore ||
+                                 name == sym::inline || name == sym::link;
 
         match attr.parse_meta(self.context.parse_sess) {
             Ok(meta) => if !should_skip(name) && !template.compatible(&meta.node) {
@@ -1893,25 +1895,25 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
         // check for gated attributes
         self.context.check_attribute(attr, attr_info, false);
 
-        if attr.check_name(symbols::doc) {
+        if attr.check_name(sym::doc) {
             if let Some(content) = attr.meta_item_list() {
-                if content.len() == 1 && content[0].check_name(symbols::cfg) {
+                if content.len() == 1 && content[0].check_name(sym::cfg) {
                     gate_feature_post!(&self, doc_cfg, attr.span,
                         "#[doc(cfg(...))] is experimental"
                     );
-                } else if content.iter().any(|c| c.check_name(symbols::masked)) {
+                } else if content.iter().any(|c| c.check_name(sym::masked)) {
                     gate_feature_post!(&self, doc_masked, attr.span,
                         "#[doc(masked)] is experimental"
                     );
-                } else if content.iter().any(|c| c.check_name(symbols::spotlight)) {
+                } else if content.iter().any(|c| c.check_name(sym::spotlight)) {
                     gate_feature_post!(&self, doc_spotlight, attr.span,
                         "#[doc(spotlight)] is experimental"
                     );
-                } else if content.iter().any(|c| c.check_name(symbols::alias)) {
+                } else if content.iter().any(|c| c.check_name(sym::alias)) {
                     gate_feature_post!(&self, doc_alias, attr.span,
                         "#[doc(alias = \"...\")] is experimental"
                     );
-                } else if content.iter().any(|c| c.check_name(symbols::keyword)) {
+                } else if content.iter().any(|c| c.check_name(sym::keyword)) {
                     gate_feature_post!(&self, doc_keyword, attr.span,
                         "#[doc(keyword = \"...\")] is experimental"
                     );
@@ -1946,7 +1948,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
     fn visit_item(&mut self, i: &'a ast::Item) {
         match i.node {
             ast::ItemKind::Const(_,_) => {
-                if i.ident.name == "_" {
+                if i.ident.name == keywords::Underscore.name() {
                     gate_feature_post!(&self, underscore_const_names, i.span,
                                         "naming constants with `_` is unstable");
                 }
@@ -1957,17 +1959,17 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             }
 
             ast::ItemKind::Fn(..) => {
-                if attr::contains_name(&i.attrs[..], "plugin_registrar") {
+                if attr::contains_name(&i.attrs[..], sym::plugin_registrar) {
                     gate_feature_post!(&self, plugin_registrar, i.span,
                                        "compiler plugins are experimental and possibly buggy");
                 }
-                if attr::contains_name(&i.attrs[..], "start") {
+                if attr::contains_name(&i.attrs[..], sym::start) {
                     gate_feature_post!(&self, start, i.span,
                                       "a #[start] function is an experimental \
                                        feature whose signature may change \
                                        over time");
                 }
-                if attr::contains_name(&i.attrs[..], "main") {
+                if attr::contains_name(&i.attrs[..], sym::main) {
                     gate_feature_post!(&self, main, i.span,
                                        "declaration of a nonstandard #[main] \
                                         function may change over time, for now \
@@ -1976,9 +1978,9 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             }
 
             ast::ItemKind::Struct(..) => {
-                for attr in attr::filter_by_name(&i.attrs[..], "repr") {
+                for attr in attr::filter_by_name(&i.attrs[..], sym::repr) {
                     for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
-                        if item.check_name(symbols::simd) {
+                        if item.check_name(sym::simd) {
                             gate_feature_post!(&self, repr_simd, attr.span,
                                                "SIMD types are experimental and possibly buggy");
                         }
@@ -1987,9 +1989,9 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             }
 
             ast::ItemKind::Enum(..) => {
-                for attr in attr::filter_by_name(&i.attrs[..], "repr") {
+                for attr in attr::filter_by_name(&i.attrs[..], sym::repr) {
                     for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
-                        if item.check_name(symbols::align) {
+                        if item.check_name(sym::align) {
                             gate_feature_post!(&self, repr_align_enum, attr.span,
                                                "`#[repr(align(x))]` on enums is experimental");
                         }
@@ -2051,7 +2053,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
         match i.node {
             ast::ForeignItemKind::Fn(..) |
             ast::ForeignItemKind::Static(..) => {
-                let link_name = attr::first_attr_value_str_by_name(&i.attrs, "link_name");
+                let link_name = attr::first_attr_value_str_by_name(&i.attrs, sym::link_name);
                 let links_to_llvm = match link_name {
                     Some(val) => val.as_str().starts_with("llvm."),
                     _ => false
@@ -2303,7 +2305,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute],
         if edition <= crate_edition {
             // The `crate_edition` implies its respective umbrella feature-gate
             // (i.e., `#![feature(rust_20XX_preview)]` isn't needed on edition 20XX).
-            edition_enabled_features.insert(Symbol::intern(edition.feature_name()), edition);
+            edition_enabled_features.insert(edition.feature_name(), edition);
         }
     }
 
@@ -2311,7 +2313,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute],
         if let Some(f_edition) = f_edition {
             if f_edition <= crate_edition {
                 set(&mut features, DUMMY_SP);
-                edition_enabled_features.insert(Symbol::intern(name), crate_edition);
+                edition_enabled_features.insert(name, crate_edition);
             }
         }
     }
@@ -2319,7 +2321,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute],
     // Process the edition umbrella feature-gates first, to ensure
     // `edition_enabled_features` is completed before it's queried.
     for attr in krate_attrs {
-        if !attr.check_name(symbols::feature) {
+        if !attr.check_name(sym::feature) {
             continue
         }
 
@@ -2355,7 +2357,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute],
                             // FIXME(Manishearth) there is currently no way to set
                             // lib features by edition
                             set(&mut features, DUMMY_SP);
-                            edition_enabled_features.insert(Symbol::intern(name), *edition);
+                            edition_enabled_features.insert(name, *edition);
                         }
                     }
                 }
@@ -2364,7 +2366,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute],
     }
 
     for attr in krate_attrs {
-        if !attr.check_name(symbols::feature) {
+        if !attr.check_name(sym::feature) {
             continue
         }
 
@@ -2438,7 +2440,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute],
 pub fn check_crate(krate: &ast::Crate,
                    sess: &ParseSess,
                    features: &Features,
-                   plugin_attributes: &[(String, AttributeType)],
+                   plugin_attributes: &[(Symbol, AttributeType)],
                    unstable: UnstableFeatures) {
     maybe_stage_features(&sess.span_diagnostic, krate, unstable);
     let ctx = Context {
@@ -2496,7 +2498,7 @@ fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate,
     };
     if !allow_features {
         for attr in &krate.attrs {
-            if attr.check_name(symbols::feature) {
+            if attr.check_name(sym::feature) {
                 let release_channel = option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)");
                 span_err!(span_handler, attr.span, E0554,
                           "#![feature] may not be used on the {} release channel",
diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs
index 6eb8b1b5004..f587e63e12b 100644
--- a/src/libsyntax/mut_visit.rs
+++ b/src/libsyntax/mut_visit.rs
@@ -663,7 +663,6 @@ pub fn noop_visit_interpolated<T: MutVisitor>(nt: &mut token::Nonterminal, vis:
         token::NtMeta(meta) => vis.visit_meta_item(meta),
         token::NtPath(path) => vis.visit_path(path),
         token::NtTT(tt) => vis.visit_tt(tt),
-        token::NtArm(arm) => vis.visit_arm(arm),
         token::NtImplItem(item) =>
             visit_clobber(item, |item| {
                 // See reasoning above.
@@ -676,9 +675,6 @@ pub fn noop_visit_interpolated<T: MutVisitor>(nt: &mut token::Nonterminal, vis:
                 vis.flat_map_trait_item(item)
                     .expect_one("expected visitor to produce exactly one item")
             }),
-        token::NtGenerics(generics) => vis.visit_generics(generics),
-        token::NtWhereClause(where_clause) => vis.visit_where_clause(where_clause),
-        token::NtArg(arg) => vis.visit_arg(arg),
         token::NtVis(visib) => vis.visit_vis(visib),
         token::NtForeignItem(item) =>
             visit_clobber(item, |item| {
diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs
index b4103440e35..dfd6f451c28 100644
--- a/src/libsyntax/parse/classify.rs
+++ b/src/libsyntax/parse/classify.rs
@@ -25,16 +25,3 @@ pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool {
         _ => true,
     }
 }
-
-/// this statement requires a semicolon after it.
-/// note that in one case (`stmt_semi`), we've already
-/// seen the semicolon, and thus don't need another.
-pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool {
-    match *stmt {
-        ast::StmtKind::Local(_) => true,
-        ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e),
-        ast::StmtKind::Item(_) |
-        ast::StmtKind::Semi(..) |
-        ast::StmtKind::Mac(..) => false,
-    }
-}
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index 32e1ee94f0d..1a2393be806 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -1,14 +1,16 @@
 use crate::ast;
-use crate::ast::{Expr, ExprKind, Item, ItemKind, Pat, PatKind, QSelf, Ty, TyKind};
-use crate::parse::parser::PathStyle;
+use crate::ast::{BlockCheckMode, Expr, ExprKind, Item, ItemKind, Pat, PatKind, QSelf, Ty, TyKind};
+use crate::parse::parser::{BlockMode, PathStyle, TokenType, SemiColonMode};
 use crate::parse::token;
 use crate::parse::PResult;
 use crate::parse::Parser;
 use crate::print::pprust;
 use crate::ptr::P;
+use crate::symbol::keywords;
 use crate::ThinVec;
-use errors::Applicability;
+use errors::{Applicability, DiagnosticBuilder};
 use syntax_pos::Span;
+use log::debug;
 
 pub trait RecoverQPath: Sized + 'static {
     const PATH_STYLE: PathStyle = PathStyle::Expr;
@@ -223,4 +225,300 @@ impl<'a> Parser<'a> {
             false
         }
     }
+
+    /// Consume alternative await syntaxes like `await <expr>`, `await? <expr>`, `await(<expr>)`
+    /// and `await { <expr> }`.
+    crate fn parse_incorrect_await_syntax(
+        &mut self,
+        lo: Span,
+        await_sp: Span,
+    ) -> PResult<'a, (Span, ExprKind)> {
+        let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
+        let expr = if self.token == token::OpenDelim(token::Brace) {
+            // Handle `await { <expr> }`.
+            // This needs to be handled separately from the next arm to avoid
+            // interpreting `await { <expr> }?` as `<expr>?.await`.
+            self.parse_block_expr(
+                None,
+                self.span,
+                BlockCheckMode::Default,
+                ThinVec::new(),
+            )
+        } else {
+            self.parse_expr()
+        }.map_err(|mut err| {
+            err.span_label(await_sp, "while parsing this incorrect await expression");
+            err
+        })?;
+        let expr_str = self.sess.source_map().span_to_snippet(expr.span)
+            .unwrap_or_else(|_| pprust::expr_to_string(&expr));
+        let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
+        let sp = lo.to(expr.span);
+        let app = match expr.node {
+            ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
+            _ => Applicability::MachineApplicable,
+        };
+        self.struct_span_err(sp, "incorrect use of `await`")
+            .span_suggestion(sp, "`await` is a postfix operation", suggestion, app)
+            .emit();
+        Ok((sp, ExprKind::Await(ast::AwaitOrigin::FieldLike, expr)))
+    }
+
+    /// If `future.await()` is encountered, consume the parentheses and emit an error.
+    crate fn recover_from_await_method_call(&mut self) {
+        if self.token == token::OpenDelim(token::Paren) &&
+            self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
+        {
+            // future.await()
+            let lo = self.span;
+            self.bump(); // (
+            let sp = lo.to(self.span);
+            self.bump(); // )
+            self.struct_span_err(sp, "incorrect use of `await`")
+                .span_suggestion(
+                    sp,
+                    "`await` is not a method call, remove the parentheses",
+                    String::new(),
+                    Applicability::MachineApplicable,
+                ).emit()
+        }
+    }
+
+    crate fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
+        self.token.is_ident() &&
+            if let ast::ExprKind::Path(..) = node { true } else { false } &&
+            !self.token.is_reserved_ident() &&           // v `foo:bar(baz)`
+            self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) ||
+            self.look_ahead(1, |t| t == &token::Lt) &&     // `foo:bar<baz`
+            self.look_ahead(2, |t| t.is_ident()) ||
+            self.look_ahead(1, |t| t == &token::Colon) &&  // `foo:bar:baz`
+            self.look_ahead(2, |t| t.is_ident()) ||
+            self.look_ahead(1, |t| t == &token::ModSep) &&  // `foo:bar::baz`
+            self.look_ahead(2, |t| t.is_ident())
+    }
+
+    crate fn bad_type_ascription(
+        &self,
+        err: &mut DiagnosticBuilder<'a>,
+        lhs_span: Span,
+        cur_op_span: Span,
+        next_sp: Span,
+        maybe_path: bool,
+    ) {
+        err.span_label(self.span, "expecting a type here because of type ascription");
+        let cm = self.sess.source_map();
+        let next_pos = cm.lookup_char_pos(next_sp.lo());
+        let op_pos = cm.lookup_char_pos(cur_op_span.hi());
+        if op_pos.line != next_pos.line {
+            err.span_suggestion(
+                cur_op_span,
+                "try using a semicolon",
+                ";".to_string(),
+                Applicability::MaybeIncorrect,
+            );
+        } else {
+            if maybe_path {
+                err.span_suggestion(
+                    cur_op_span,
+                    "maybe you meant to write a path separator here",
+                    "::".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            } else {
+                err.note("type ascription is a nightly-only feature that lets \
+                          you annotate an expression with a type: `<expr>: <type>`")
+                    .span_note(
+                        lhs_span,
+                        "this expression expects an ascribed type after the colon",
+                    )
+                    .help("this might be indicative of a syntax error elsewhere");
+            }
+        }
+    }
+
+    crate fn recover_seq_parse_error(
+        &mut self,
+        delim: token::DelimToken,
+        lo: Span,
+        result: PResult<'a, P<Expr>>,
+    ) -> P<Expr> {
+        match result {
+            Ok(x) => x,
+            Err(mut err) => {
+                err.emit();
+                // recover from parse error
+                self.consume_block(delim);
+                self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new())
+            }
+        }
+    }
+
+    crate fn recover_closing_delimiter(
+        &mut self,
+        tokens: &[token::Token],
+        mut err: DiagnosticBuilder<'a>,
+    ) -> PResult<'a, bool> {
+        let mut pos = None;
+        // we want to use the last closing delim that would apply
+        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
+            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
+                && Some(self.span) > unmatched.unclosed_span
+            {
+                pos = Some(i);
+            }
+        }
+        match pos {
+            Some(pos) => {
+                // Recover and assume that the detected unclosed delimiter was meant for
+                // this location. Emit the diagnostic and act as if the delimiter was
+                // present for the parser's sake.
+
+                // Don't attempt to recover from this unclosed delimiter more than once.
+                let unmatched = self.unclosed_delims.remove(pos);
+                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
+
+                // We want to suggest the inclusion of the closing delimiter where it makes
+                // the most sense, which is immediately after the last token:
+                //
+                //  {foo(bar {}}
+                //      -      ^
+                //      |      |
+                //      |      help: `)` may belong here (FIXME: #58270)
+                //      |
+                //      unclosed delimiter
+                if let Some(sp) = unmatched.unclosed_span {
+                    err.span_label(sp, "unclosed delimiter");
+                }
+                err.span_suggestion_short(
+                    self.sess.source_map().next_point(self.prev_span),
+                    &format!("{} may belong here", delim.to_string()),
+                    delim.to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit();
+                self.expected_tokens.clear();  // reduce errors
+                Ok(true)
+            }
+            _ => Err(err),
+        }
+    }
+
+    /// Recover from `pub` keyword in places where it seems _reasonable_ but isn't valid.
+    crate fn eat_bad_pub(&mut self) {
+        if self.token.is_keyword(keywords::Pub) {
+            match self.parse_visibility(false) {
+                Ok(vis) => {
+                    self.diagnostic()
+                        .struct_span_err(vis.span, "unnecessary visibility qualifier")
+                        .span_label(vis.span, "`pub` not permitted here")
+                        .emit();
+                }
+                Err(mut err) => err.emit(),
+            }
+        }
+    }
+
+    // Eat tokens until we can be relatively sure we reached the end of the
+    // statement. This is something of a best-effort heuristic.
+    //
+    // We terminate when we find an unmatched `}` (without consuming it).
+    crate fn recover_stmt(&mut self) {
+        self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
+    }
+
+    // If `break_on_semi` is `Break`, then we will stop consuming tokens after
+    // finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
+    // approximate - it can mean we break too early due to macros, but that
+    // should only lead to sub-optimal recovery, not inaccurate parsing).
+    //
+    // If `break_on_block` is `Break`, then we will stop consuming tokens
+    // after finding (and consuming) a brace-delimited block.
+    crate fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) {
+        let mut brace_depth = 0;
+        let mut bracket_depth = 0;
+        let mut in_block = false;
+        debug!("recover_stmt_ enter loop (semi={:?}, block={:?})",
+               break_on_semi, break_on_block);
+        loop {
+            debug!("recover_stmt_ loop {:?}", self.token);
+            match self.token {
+                token::OpenDelim(token::DelimToken::Brace) => {
+                    brace_depth += 1;
+                    self.bump();
+                    if break_on_block == BlockMode::Break &&
+                       brace_depth == 1 &&
+                       bracket_depth == 0 {
+                        in_block = true;
+                    }
+                }
+                token::OpenDelim(token::DelimToken::Bracket) => {
+                    bracket_depth += 1;
+                    self.bump();
+                }
+                token::CloseDelim(token::DelimToken::Brace) => {
+                    if brace_depth == 0 {
+                        debug!("recover_stmt_ return - close delim {:?}", self.token);
+                        break;
+                    }
+                    brace_depth -= 1;
+                    self.bump();
+                    if in_block && bracket_depth == 0 && brace_depth == 0 {
+                        debug!("recover_stmt_ return - block end {:?}", self.token);
+                        break;
+                    }
+                }
+                token::CloseDelim(token::DelimToken::Bracket) => {
+                    bracket_depth -= 1;
+                    if bracket_depth < 0 {
+                        bracket_depth = 0;
+                    }
+                    self.bump();
+                }
+                token::Eof => {
+                    debug!("recover_stmt_ return - Eof");
+                    break;
+                }
+                token::Semi => {
+                    self.bump();
+                    if break_on_semi == SemiColonMode::Break &&
+                       brace_depth == 0 &&
+                       bracket_depth == 0 {
+                        debug!("recover_stmt_ return - Semi");
+                        break;
+                    }
+                }
+                token::Comma if break_on_semi == SemiColonMode::Comma &&
+                       brace_depth == 0 &&
+                       bracket_depth == 0 =>
+                {
+                    debug!("recover_stmt_ return - Semi");
+                    break;
+                }
+                _ => {
+                    self.bump()
+                }
+            }
+        }
+    }
+
+    crate fn consume_block(&mut self, delim: token::DelimToken) {
+        let mut brace_depth = 0;
+        loop {
+            if self.eat(&token::OpenDelim(delim)) {
+                brace_depth += 1;
+            } else if self.eat(&token::CloseDelim(delim)) {
+                if brace_depth == 0 {
+                    return;
+                } else {
+                    brace_depth -= 1;
+                    continue;
+                }
+            } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
+                return;
+            } else {
+                self.bump();
+            }
+        }
+    }
+
 }
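For context, the await-recovery routines added above rewrite prefix-style `await` into the postfix form and strip call-style parentheses, as described in their doc comments. A hedged before/after sketch of the inputs they target (the surrounding bindings are illustrative):

    // What the parser may encounter:
    let x = await fut;      // prefix form
    let y = await? fut;     // prefix form with `?`
    let z = fut.await();    // method-call form
    // What the emitted suggestions produce:
    let x = fut.await;
    let y = fut.await?;
    let z = fut.await;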
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 74fff3324ea..97d3fc002e9 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -3,8 +3,7 @@ pub use CommentStyle::*;
 use crate::ast;
 use crate::source_map::SourceMap;
 use crate::parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
-use crate::parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
-use crate::print::pprust;
+use crate::parse::lexer::{self, ParseSess, StringReader};
 
 use syntax_pos::{BytePos, CharPos, Pos, FileName};
 use log::debug;
@@ -339,16 +338,9 @@ fn consume_comment(rdr: &mut StringReader<'_>,
     debug!("<<< consume comment");
 }
 
-#[derive(Clone)]
-pub struct Literal {
-    pub lit: String,
-    pub pos: BytePos,
-}
-
 // it appears this function is called only from pprust... that's
 // probably not a good thing.
-pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut dyn Read)
-    -> (Vec<Comment>, Vec<Literal>)
+pub fn gather_comments(sess: &ParseSess, path: FileName, srdr: &mut dyn Read) -> Vec<Comment>
 {
     let mut src = String::new();
     srdr.read_to_string(&mut src).unwrap();
@@ -357,7 +349,6 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut
     let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);
 
     let mut comments: Vec<Comment> = Vec::new();
-    let mut literals: Vec<Literal> = Vec::new();
     let mut code_to_the_left = false; // Only code
     let mut anything_to_the_left = false; // Code or comments
 
@@ -382,26 +373,12 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut
             }
         }
 
-        let bstart = rdr.pos;
         rdr.next_token();
-        // discard, and look ahead; we're working with internal state
-        let TokenAndSpan { tok, sp } = rdr.peek();
-        if tok.is_lit() {
-            rdr.with_str_from(bstart, |s| {
-                debug!("tok lit: {}", s);
-                literals.push(Literal {
-                    lit: s.to_string(),
-                    pos: sp.lo(),
-                });
-            })
-        } else {
-            debug!("tok: {}", pprust::token_to_string(&tok));
-        }
         code_to_the_left = true;
         anything_to_the_left = true;
     }
 
-    (comments, literals)
+    comments
 }
 
 #[cfg(test)]
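The new `gather_comments` keeps only the comment-collection half of the old `gather_comments_and_literals`. A hedged usage sketch based on the signature shown above; the `sess` and `path` values are assumed to be an existing `ParseSess` and `FileName`, not something this patch constructs:

    // Collect comments from an in-memory source; literals are no longer returned.
    let mut src = std::io::Cursor::new("// leading comment\nfn main() {}");
    let comments = gather_comments(&sess, path, &mut src);
    assert!(!comments.is_empty());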
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 2882acb0e78..47da3ee6a6c 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -62,19 +62,7 @@ pub struct StringReader<'a> {
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.source_file.src.as_ref().unwrap()` all the time.
     src: Lrc<String>,
-    token: token::Token,
-    span: Span,
-    /// The raw source span which *does not* take `override_span` into account
-    span_src_raw: Span,
-    /// Stack of open delimiters and their spans. Used for error message.
-    open_braces: Vec<(token::DelimToken, Span)>,
-    crate unmatched_braces: Vec<UnmatchedBrace>,
-    /// The type and spans for all braces
-    ///
-    /// Used only for error recovery when arriving to EOF with mismatched braces.
-    matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
-    crate override_span: Option<Span>,
-    last_unclosed_found_span: Option<Span>,
+    override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
@@ -121,8 +109,6 @@ impl<'a> StringReader<'a> {
             sp: self.peek_span,
         };
         self.advance_token()?;
-        self.span_src_raw = self.peek_span_src_raw;
-
         Ok(ret_val)
     }
 
@@ -159,9 +145,6 @@ impl<'a> StringReader<'a> {
             }
         }
 
-        self.token = t.tok.clone();
-        self.span = t.sp;
-
         Ok(t)
     }
 
@@ -251,29 +234,10 @@ impl<'a> StringReader<'a> {
             peek_span_src_raw: syntax_pos::DUMMY_SP,
             src,
             fatal_errs: Vec::new(),
-            token: token::Eof,
-            span: syntax_pos::DUMMY_SP,
-            span_src_raw: syntax_pos::DUMMY_SP,
-            open_braces: Vec::new(),
-            unmatched_braces: Vec::new(),
-            matching_delim_spans: Vec::new(),
             override_span,
-            last_unclosed_found_span: None,
         }
     }
 
-    pub fn new(sess: &'a ParseSess,
-               source_file: Lrc<syntax_pos::SourceFile>,
-               override_span: Option<Span>) -> Self {
-        let mut sr = StringReader::new_raw(sess, source_file, override_span);
-        if sr.advance_token().is_err() {
-            sr.emit_fatal_errors();
-            FatalError.raise();
-        }
-
-        sr
-    }
-
     pub fn new_or_buffered_errs(sess: &'a ParseSess,
                                 source_file: Lrc<syntax_pos::SourceFile>,
                                 override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
@@ -1627,7 +1591,12 @@ mod tests {
                  teststr: String)
                  -> StringReader<'a> {
         let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
-        StringReader::new(sess, sf, None)
+        let mut sr = StringReader::new_raw(sess, sf, None);
+        if sr.advance_token().is_err() {
+            sr.emit_fatal_errors();
+            FatalError.raise();
+        }
+        sr
     }
 
     #[test]
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 0db36c84cdf..4bfc5bb16c0 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,13 +1,46 @@
+use syntax_pos::Span;
+
 use crate::print::pprust::token_to_string;
 use crate::parse::lexer::{StringReader, UnmatchedBrace};
 use crate::parse::{token, PResult};
 use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
+    crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+        let mut tt_reader = TokenTreesReader {
+            string_reader: self,
+            token: token::Eof,
+            span: syntax_pos::DUMMY_SP,
+            open_braces: Vec::new(),
+            unmatched_braces: Vec::new(),
+            matching_delim_spans: Vec::new(),
+            last_unclosed_found_span: None,
+        };
+        let res = tt_reader.parse_all_token_trees();
+        (res, tt_reader.unmatched_braces)
+    }
+}
+
+struct TokenTreesReader<'a> {
+    string_reader: StringReader<'a>,
+    token: token::Token,
+    span: Span,
+    /// Stack of open delimiters and their spans. Used for error message.
+    open_braces: Vec<(token::DelimToken, Span)>,
+    unmatched_braces: Vec<UnmatchedBrace>,
+    /// The type and spans for all braces
+    ///
+    /// Used only for error recovery when arriving to EOF with mismatched braces.
+    matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
+    last_unclosed_found_span: Option<Span>,
+}
+
+impl<'a> TokenTreesReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
-    crate fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
+    fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
         let mut tts = Vec::new();
 
+        self.real_token();
         while self.token != token::Eof {
             tts.push(self.parse_token_tree()?);
         }
@@ -34,11 +67,12 @@ impl<'a> StringReader<'a> {
     }
 
     fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
-        let sm = self.sess.source_map();
+        let sm = self.string_reader.sess.source_map();
         match self.token {
             token::Eof => {
                 let msg = "this file contains an un-closed delimiter";
-                let mut err = self.sess.span_diagnostic.struct_span_err(self.span, msg);
+                let mut err = self.string_reader.sess.span_diagnostic
+                    .struct_span_err(self.span, msg);
                 for &(_, sp) in &self.open_braces {
                     err.span_label(sp, "un-closed delimiter");
                 }
@@ -46,13 +80,12 @@ impl<'a> StringReader<'a> {
                 if let Some((delim, _)) = self.open_braces.last() {
                     if let Some((_, open_sp, close_sp)) = self.matching_delim_spans.iter()
                         .filter(|(d, open_sp, close_sp)| {
-
-                        if let Some(close_padding) = sm.span_to_margin(*close_sp) {
-                            if let Some(open_padding) = sm.span_to_margin(*open_sp) {
-                                return delim == d && close_padding != open_padding;
+                            if let Some(close_padding) = sm.span_to_margin(*close_sp) {
+                                if let Some(open_padding) = sm.span_to_margin(*open_sp) {
+                                    return delim == d && close_padding != open_padding;
+                                }
                             }
-                        }
-                        false
+                            false
                         }).next()  // these are in reverse order as they get inserted on close, but
                     {              // we want the last open/first close
                         err.span_label(
@@ -164,7 +197,8 @@ impl<'a> StringReader<'a> {
                 // matching opening delimiter).
                 let token_str = token_to_string(&self.token);
                 let msg = format!("unexpected close delimiter: `{}`", token_str);
-                let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
+                let mut err = self.string_reader.sess.span_diagnostic
+                    .struct_span_err(self.span, &msg);
                 err.span_label(self.span, "unexpected close delimiter");
                 Err(err)
             },
@@ -173,11 +207,20 @@ impl<'a> StringReader<'a> {
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
-                let raw = self.span_src_raw;
+                // Additionally, we actually check if the *next* pair of tokens
+                // is joint, but this is equivalent to checking the current pair.
+                let raw = self.string_reader.peek_span_src_raw;
                 self.real_token();
-                let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
+                let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
+                    && token::is_op(&self.token);
                 Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }
     }
+
+    fn real_token(&mut self) {
+        let t = self.string_reader.real_token();
+        self.token = t.tok;
+        self.span = t.sp;
+    }
 }
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
new file mode 100644
index 00000000000..53195421ddc
--- /dev/null
+++ b/src/libsyntax/parse/literal.rs
@@ -0,0 +1,487 @@
+//! Code related to parsing literals.
+
+use crate::ast::{self, Ident, Lit, LitKind};
+use crate::parse::parser::Parser;
+use crate::parse::PResult;
+use crate::parse::token::{self, Token};
+use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
+use crate::print::pprust;
+use crate::symbol::{keywords, Symbol};
+use crate::tokenstream::{TokenStream, TokenTree};
+
+use errors::{Applicability, Handler};
+use log::debug;
+use rustc_data_structures::sync::Lrc;
+use syntax_pos::Span;
+
+use std::ascii;
+
+macro_rules! err {
+    ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
+        match $opt_diag {
+            Some(($span, $diag)) => { $($body)* }
+            None => return None,
+        }
+    }
+}
+
+impl LitKind {
+    /// Converts a literal token with a suffix into a semantic literal.
+    /// Works speculatively and may return `None` if no diagnostic handler is passed.
+    /// If a diagnostic handler is passed, this always returns `Some`,
+    /// possibly after reporting non-fatal errors and recovering.
+    fn from_lit_token(
+        lit: token::Lit,
+        suf: Option<Symbol>,
+        diag: Option<(Span, &Handler)>
+    ) -> Option<LitKind> {
+        if suf.is_some() && !lit.may_have_suffix() {
+            err!(diag, |span, diag| {
+                expect_no_suffix(span, diag, &format!("a {}", lit.literal_name()), suf)
+            });
+        }
+
+        Some(match lit {
+            token::Bool(i) => {
+                assert!(i == keywords::True.name() || i == keywords::False.name());
+                LitKind::Bool(i == keywords::True.name())
+            }
+            token::Byte(i) => {
+                match unescape_byte(&i.as_str()) {
+                    Ok(c) => LitKind::Byte(c),
+                    Err(_) => LitKind::Err(i),
+                }
+            },
+            token::Char(i) => {
+                match unescape_char(&i.as_str()) {
+                    Ok(c) => LitKind::Char(c),
+                    Err(_) => LitKind::Err(i),
+                }
+            },
+            token::Err(i) => LitKind::Err(i),
+
+            // There are some valid suffixes for integer and float literals,
+            // so all the handling is done internally.
+            token::Integer(s) => return integer_lit(&s.as_str(), suf, diag),
+            token::Float(s) => return float_lit(&s.as_str(), suf, diag),
+
+            token::Str_(mut sym) => {
+                // If there are no characters requiring special treatment we can
+                // reuse the symbol from the Token. Otherwise, we must generate a
+                // new symbol because the string in the LitKind is different to the
+                // string in the Token.
+                let mut has_error = false;
+                let s = &sym.as_str();
+                if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
+                    let mut buf = String::with_capacity(s.len());
+                    unescape_str(s, &mut |_, unescaped_char| {
+                        match unescaped_char {
+                            Ok(c) => buf.push(c),
+                            Err(_) => has_error = true,
+                        }
+                    });
+                    if has_error {
+                        return Some(LitKind::Err(sym));
+                    }
+                    sym = Symbol::intern(&buf)
+                }
+
+                LitKind::Str(sym, ast::StrStyle::Cooked)
+            }
+            token::StrRaw(mut sym, n) => {
+                // Ditto.
+                let s = &sym.as_str();
+                if s.contains('\r') {
+                    sym = Symbol::intern(&raw_str_lit(s));
+                }
+                LitKind::Str(sym, ast::StrStyle::Raw(n))
+            }
+            token::ByteStr(i) => {
+                let s = &i.as_str();
+                let mut buf = Vec::with_capacity(s.len());
+                let mut has_error = false;
+                unescape_byte_str(s, &mut |_, unescaped_byte| {
+                    match unescaped_byte {
+                        Ok(c) => buf.push(c),
+                        Err(_) => has_error = true,
+                    }
+                });
+                if has_error {
+                    return Some(LitKind::Err(i));
+                }
+                buf.shrink_to_fit();
+                LitKind::ByteStr(Lrc::new(buf))
+            }
+            token::ByteStrRaw(i, _) => {
+                LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))
+            }
+        })
+    }
+
+    /// Attempts to recover a token from a semantic literal.
+    /// This function is used when the original token doesn't exist (e.g., the literal was created
+    /// by an AST-based macro) or is unavailable (e.g., during HIR pretty-printing).
+    pub fn to_lit_token(&self) -> (token::Lit, Option<Symbol>) {
+        match *self {
+            LitKind::Str(string, ast::StrStyle::Cooked) => {
+                let escaped = string.as_str().escape_default().to_string();
+                (token::Lit::Str_(Symbol::intern(&escaped)), None)
+            }
+            LitKind::Str(string, ast::StrStyle::Raw(n)) => {
+                (token::Lit::StrRaw(string, n), None)
+            }
+            LitKind::ByteStr(ref bytes) => {
+                let string = bytes.iter().cloned().flat_map(ascii::escape_default)
+                    .map(Into::<char>::into).collect::<String>();
+                (token::Lit::ByteStr(Symbol::intern(&string)), None)
+            }
+            LitKind::Byte(byte) => {
+                let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
+                (token::Lit::Byte(Symbol::intern(&string)), None)
+            }
+            LitKind::Char(ch) => {
+                let string: String = ch.escape_default().map(Into::<char>::into).collect();
+                (token::Lit::Char(Symbol::intern(&string)), None)
+            }
+            LitKind::Int(n, ty) => {
+                let suffix = match ty {
+                    ast::LitIntType::Unsigned(ty) => Some(Symbol::intern(ty.ty_to_string())),
+                    ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
+                    ast::LitIntType::Unsuffixed => None,
+                };
+                (token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
+            }
+            LitKind::Float(symbol, ty) => {
+                (token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
+            }
+            LitKind::FloatUnsuffixed(symbol) => (token::Lit::Float(symbol), None),
+            LitKind::Bool(value) => {
+                let kw = if value { keywords::True } else { keywords::False };
+                (token::Lit::Bool(kw.name()), None)
+            }
+            LitKind::Err(val) => (token::Lit::Err(val), None),
+        }
+    }
+}
+
+impl Lit {
+    /// Converts a literal token with a suffix into an AST literal.
+    /// Works speculatively and may return `None` if no diagnostic handler is passed.
+    /// If a diagnostic handler is passed, this may return `Some`,
+    /// possibly after reporting non-fatal errors and recovering, or `None` for irrecoverable errors.
+    crate fn from_token(
+        token: &token::Token,
+        span: Span,
+        diag: Option<(Span, &Handler)>,
+    ) -> Option<Lit> {
+        let (token, suffix) = match *token {
+            token::Ident(ident, false) if ident.name == keywords::True.name() ||
+                                          ident.name == keywords::False.name() =>
+                (token::Bool(ident.name), None),
+            token::Literal(token, suffix) =>
+                (token, suffix),
+            token::Interpolated(ref nt) => {
+                if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
+                    if let ast::ExprKind::Lit(lit) = &expr.node {
+                        return Some(lit.clone());
+                    }
+                }
+                return None;
+            }
+            _ => return None,
+        };
+
+        let node = LitKind::from_lit_token(token, suffix, diag)?;
+        Some(Lit { node, token, suffix, span })
+    }
+
+    /// Attempts to recover an AST literal from a semantic literal.
+    /// This function is used when the original token doesn't exist (e.g., the literal was created
+    /// by an AST-based macro) or is unavailable (e.g., during HIR pretty-printing).
+    pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
+        let (token, suffix) = node.to_lit_token();
+        Lit { node, token, suffix, span }
+    }
+
+    /// Losslessly convert an AST literal into a token stream.
+    crate fn tokens(&self) -> TokenStream {
+        let token = match self.token {
+            token::Bool(symbol) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
+            token => Token::Literal(token, self.suffix),
+        };
+        TokenTree::Token(self.span, token).into()
+    }
+}
+
+impl<'a> Parser<'a> {
+    /// Matches `lit = true | false | token_lit`.
+    crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
+        let diag = Some((self.span, &self.sess.span_diagnostic));
+        if let Some(lit) = Lit::from_token(&self.token, self.span, diag) {
+            self.bump();
+            return Ok(lit);
+        } else if self.token == token::Dot {
+            // Recover `.4` as `0.4`.
+            let recovered = self.look_ahead(1, |t| {
+                if let token::Literal(token::Integer(val), suf) = *t {
+                    let next_span = self.look_ahead_span(1);
+                    if self.span.hi() == next_span.lo() {
+                        let sym = String::from("0.") + &val.as_str();
+                        let token = token::Literal(token::Float(Symbol::intern(&sym)), suf);
+                        return Some((token, self.span.to(next_span)));
+                    }
+                }
+                None
+            });
+            if let Some((token, span)) = recovered {
+                self.diagnostic()
+                    .struct_span_err(span, "float literals must have an integer part")
+                    .span_suggestion(
+                        span,
+                        "must have an integer part",
+                        pprust::token_to_string(&token),
+                        Applicability::MachineApplicable,
+                    )
+                    .emit();
+                let diag = Some((span, &self.sess.span_diagnostic));
+                if let Some(lit) = Lit::from_token(&token, span, diag) {
+                    self.bump();
+                    self.bump();
+                    return Ok(lit);
+                }
+            }
+        }
+
+        Err(self.span_fatal(self.span, &format!("unexpected token: {}", self.this_token_descr())))
+    }
+}
+
+crate fn expect_no_suffix(sp: Span, diag: &Handler, kind: &str, suffix: Option<ast::Name>) {
+    match suffix {
+        None => {/* everything ok */}
+        Some(suf) => {
+            let text = suf.as_str();
+            if text.is_empty() {
+                diag.span_bug(sp, "found empty literal suffix in Some")
+            }
+            let mut err = if kind == "a tuple index" &&
+                ["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str())
+            {
+                // #59553: warn instead of reject out of hand to allow the fix to percolate
+                // through the ecosystem when people fix their macros
+                let mut err = diag.struct_span_warn(
+                    sp,
+                    &format!("suffixes on {} are invalid", kind),
+                );
+                err.note(&format!(
+                    "`{}` is *temporarily* accepted on tuple index fields as it was \
+                        incorrectly accepted on stable for a few releases",
+                    text,
+                ));
+                err.help(
+                    "on proc macros, you'll want to use `syn::Index::from` or \
+                        `proc_macro::Literal::*_unsuffixed` for code that will desugar \
+                        to tuple field access",
+                );
+                err.note(
+                    "for more context, see https://github.com/rust-lang/rust/issues/60210",
+                );
+                err
+            } else {
+                diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
+            };
+            err.span_label(sp, format!("invalid suffix `{}`", text));
+            err.emit();
+        }
+    }
+}
+
+/// Parses a string representing a raw string literal into its final form. The
+/// only operation this does is convert embedded CRLF into a single LF.
+fn raw_str_lit(lit: &str) -> String {
+    debug!("raw_str_lit: given {}", lit.escape_default());
+    let mut res = String::with_capacity(lit.len());
+
+    let mut chars = lit.chars().peekable();
+    while let Some(c) = chars.next() {
+        if c == '\r' {
+            if *chars.peek().unwrap() != '\n' {
+                panic!("lexer accepted bare CR");
+            }
+            chars.next();
+            res.push('\n');
+        } else {
+            res.push(c);
+        }
+    }
+
+    res.shrink_to_fit();
+    res
+}
+
+// check if `s` looks like i32 or u1234 etc.
+fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
+    s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
+}
+
+fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                      -> Option<LitKind> {
+    debug!("filtered_float_lit: {}, {:?}", data, suffix);
+    let suffix = match suffix {
+        Some(suffix) => suffix,
+        None => return Some(LitKind::FloatUnsuffixed(data)),
+    };
+
+    Some(match &*suffix.as_str() {
+        "f32" => LitKind::Float(data, ast::FloatTy::F32),
+        "f64" => LitKind::Float(data, ast::FloatTy::F64),
+        suf => {
+            err!(diag, |span, diag| {
+                if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
+                    // if it looks like a width, let's try to be helpful.
+                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
+                    diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
+                } else {
+                    let msg = format!("invalid suffix `{}` for float literal", suf);
+                    diag.struct_span_err(span, &msg)
+                        .span_label(span, format!("invalid suffix `{}`", suf))
+                        .help("valid suffixes are `f32` and `f64`")
+                        .emit();
+                }
+            });
+
+            LitKind::FloatUnsuffixed(data)
+        }
+    })
+}
+fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                 -> Option<LitKind> {
+    debug!("float_lit: {:?}, {:?}", s, suffix);
+    // FIXME #2252: bounds checking float literals is deferred until trans
+
+    // Strip underscores without allocating a new String unless necessary.
+    let s2;
+    let s = if s.chars().any(|c| c == '_') {
+        s2 = s.chars().filter(|&c| c != '_').collect::<String>();
+        &s2
+    } else {
+        s
+    };
+
+    filtered_float_lit(Symbol::intern(s), suffix, diag)
+}
+
+fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                   -> Option<LitKind> {
+    // s can only be ascii, byte indexing is fine
+
+    // Strip underscores without allocating a new String unless necessary.
+    let s2;
+    let mut s = if s.chars().any(|c| c == '_') {
+        s2 = s.chars().filter(|&c| c != '_').collect::<String>();
+        &s2
+    } else {
+        s
+    };
+
+    debug!("integer_lit: {}, {:?}", s, suffix);
+
+    let mut base = 10;
+    let orig = s;
+    let mut ty = ast::LitIntType::Unsuffixed;
+
+    if s.starts_with('0') && s.len() > 1 {
+        match s.as_bytes()[1] {
+            b'x' => base = 16,
+            b'o' => base = 8,
+            b'b' => base = 2,
+            _ => { }
+        }
+    }
+
+    // 1f64 and 2f32 etc. are valid float literals.
+    if let Some(suf) = suffix {
+        if looks_like_width_suffix(&['f'], &suf.as_str()) {
+            let err = match base {
+                16 => Some("hexadecimal float literal is not supported"),
+                8 => Some("octal float literal is not supported"),
+                2 => Some("binary float literal is not supported"),
+                _ => None,
+            };
+            if let Some(err) = err {
+                err!(diag, |span, diag| {
+                    diag.struct_span_err(span, err)
+                        .span_label(span, "not supported")
+                        .emit();
+                });
+            }
+            return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
+        }
+    }
+
+    if base != 10 {
+        s = &s[2..];
+    }
+
+    if let Some(suf) = suffix {
+        if suf.as_str().is_empty() {
+            err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
+        }
+        ty = match &*suf.as_str() {
+            "isize" => ast::LitIntType::Signed(ast::IntTy::Isize),
+            "i8"  => ast::LitIntType::Signed(ast::IntTy::I8),
+            "i16" => ast::LitIntType::Signed(ast::IntTy::I16),
+            "i32" => ast::LitIntType::Signed(ast::IntTy::I32),
+            "i64" => ast::LitIntType::Signed(ast::IntTy::I64),
+            "i128" => ast::LitIntType::Signed(ast::IntTy::I128),
+            "usize" => ast::LitIntType::Unsigned(ast::UintTy::Usize),
+            "u8"  => ast::LitIntType::Unsigned(ast::UintTy::U8),
+            "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
+            "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
+            "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
+            "u128" => ast::LitIntType::Unsigned(ast::UintTy::U128),
+            suf => {
+                // i<digits> and u<digits> look like widths, so let's
+                // give an error message along those lines
+                err!(diag, |span, diag| {
+                    if looks_like_width_suffix(&['i', 'u'], suf) {
+                        let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
+                        diag.struct_span_err(span, &msg)
+                            .help("valid widths are 8, 16, 32, 64 and 128")
+                            .emit();
+                    } else {
+                        let msg = format!("invalid suffix `{}` for numeric literal", suf);
+                        diag.struct_span_err(span, &msg)
+                            .span_label(span, format!("invalid suffix `{}`", suf))
+                            .help("the suffix must be one of the integral types \
+                                   (`u32`, `isize`, etc)")
+                            .emit();
+                    }
+                });
+
+                ty
+            }
+        }
+    }
+
+    debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
+           string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);
+
+    Some(match u128::from_str_radix(s, base) {
+        Ok(r) => LitKind::Int(r, ty),
+        Err(_) => {
+            // small bases are lexed as if they were base 10, e.g., the string
+            // might be `0b10201`. This will cause the conversion above to fail,
+            // but these cases have errors in the lexer: we don't want to emit
+            // two errors, and we especially don't want to emit this error since
+            // it isn't necessarily true.
+            let already_errored = base < 10 &&
+                s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
+
+            if !already_errored {
+                err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
+            }
+            LitKind::Int(0, ty)
+        }
+    })
+}
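As a worked example of the integer path above: `0xff_u8` has its underscores stripped, base 16 detected from the `0x` prefix, the `u8` suffix mapped to `LitIntType::Unsigned`, and the remaining digits parsed with `u128::from_str_radix("ff", 16)`. A stripped-down sketch of just the base detection, with underscore and suffix handling omitted (the standalone function is illustrative, not part of the patch):

    // Mirrors the `0x`/`0o`/`0b` prefix check in `integer_lit` above,
    // returning the detected radix and the remaining digit string.
    fn detect_base(s: &str) -> (u32, &str) {
        if s.len() > 1 && s.starts_with('0') {
            match s.as_bytes()[1] {
                b'x' => return (16, &s[2..]),
                b'o' => return (8, &s[2..]),
                b'b' => return (2, &s[2..]),
                _ => {}
            }
        }
        (10, s)
    }

    // detect_base("0xff") == (16, "ff"); u128::from_str_radix("ff", 16) == Ok(255)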
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index be44b964ba5..0611c1d9b42 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -5,7 +5,6 @@ use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::feature_gate::UnstableFeatures;
 use crate::parse::parser::Parser;
-use crate::symbol::Symbol;
 use crate::syntax::parse::parser::emit_unclosed_delims;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
@@ -14,7 +13,6 @@ use crate::print::pprust::token_to_string;
 use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use rustc_data_structures::sync::{Lrc, Lock};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use log::debug;
 
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use std::borrow::Cow;
@@ -25,18 +23,15 @@ pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
 
 #[macro_use]
 pub mod parser;
-
+pub mod attr;
 pub mod lexer;
 pub mod token;
-pub mod attr;
-pub mod diagnostics;
-
-pub mod classify;
-
-pub(crate) mod unescape;
-use unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
 
-pub(crate) mod unescape_error_reporting;
+crate mod classify;
+crate mod diagnostics;
+crate mod literal;
+crate mod unescape;
+crate mod unescape_error_reporting;
 
 /// Info about a parsing session.
 pub struct ParseSess {
@@ -295,22 +290,22 @@ pub fn source_file_to_stream(
 }
 
 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
-/// parsing the token tream.
+/// parsing the token stream.
 pub fn maybe_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
-    srdr.real_token();
+    let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
+    let (token_trees, unmatched_braces) = srdr.into_token_trees();
 
-    match srdr.parse_all_token_trees() {
-        Ok(stream) => Ok((stream, srdr.unmatched_braces)),
+    match token_trees {
+        Ok(stream) => Ok((stream, unmatched_braces)),
         Err(err) => {
             let mut buffer = Vec::with_capacity(1);
             err.buffer(&mut buffer);
             // Not using `emit_unclosed_delims` to use `db.buffer`
-            for unmatched in srdr.unmatched_braces {
+            for unmatched in unmatched_braces {
                 let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
                     "incorrect close delimiter: `{}`",
                     token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
@@ -334,284 +329,6 @@ pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
     Parser::new(sess, stream, None, true, false)
 }
 
-/// Parses a string representing a raw string literal into its final form. The
-/// only operation this does is convert embedded CRLF into a single LF.
-fn raw_str_lit(lit: &str) -> String {
-    debug!("raw_str_lit: given {}", lit.escape_default());
-    let mut res = String::with_capacity(lit.len());
-
-    let mut chars = lit.chars().peekable();
-    while let Some(c) = chars.next() {
-        if c == '\r' {
-            if *chars.peek().unwrap() != '\n' {
-                panic!("lexer accepted bare CR");
-            }
-            chars.next();
-            res.push('\n');
-        } else {
-            res.push(c);
-        }
-    }
-
-    res.shrink_to_fit();
-    res
-}
-
-// check if `s` looks like i32 or u1234 etc.
-fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
-    s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
-}
-
-macro_rules! err {
-    ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
-        match $opt_diag {
-            Some(($span, $diag)) => { $($body)* }
-            None => return None,
-        }
-    }
-}
-
-crate fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                 -> (bool /* suffix illegal? */, Option<ast::LitKind>) {
-    use ast::LitKind;
-
-    match lit {
-        token::Byte(i) => {
-            let lit_kind = match unescape_byte(&i.as_str()) {
-                Ok(c) => LitKind::Byte(c),
-                Err(_) => LitKind::Err(i),
-            };
-            (true, Some(lit_kind))
-        },
-        token::Char(i) => {
-            let lit_kind = match unescape_char(&i.as_str()) {
-                Ok(c) => LitKind::Char(c),
-                Err(_) => LitKind::Err(i),
-            };
-            (true, Some(lit_kind))
-        },
-        token::Err(i) => (true, Some(LitKind::Err(i))),
-
-        // There are some valid suffixes for integer and float literals,
-        // so all the handling is done internally.
-        token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)),
-        token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)),
-
-        token::Str_(mut sym) => {
-            // If there are no characters requiring special treatment we can
-            // reuse the symbol from the Token. Otherwise, we must generate a
-            // new symbol because the string in the LitKind is different to the
-            // string in the Token.
-            let mut has_error = false;
-            let s = &sym.as_str();
-            if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
-                let mut buf = String::with_capacity(s.len());
-                unescape_str(s, &mut |_, unescaped_char| {
-                    match unescaped_char {
-                        Ok(c) => buf.push(c),
-                        Err(_) => has_error = true,
-                    }
-                });
-                if has_error {
-                    return (true, Some(LitKind::Err(sym)));
-                }
-                sym = Symbol::intern(&buf)
-            }
-
-            (true, Some(LitKind::Str(sym, ast::StrStyle::Cooked)))
-        }
-        token::StrRaw(mut sym, n) => {
-            // Ditto.
-            let s = &sym.as_str();
-            if s.contains('\r') {
-                sym = Symbol::intern(&raw_str_lit(s));
-            }
-            (true, Some(LitKind::Str(sym, ast::StrStyle::Raw(n))))
-        }
-        token::ByteStr(i) => {
-            let s = &i.as_str();
-            let mut buf = Vec::with_capacity(s.len());
-            let mut has_error = false;
-            unescape_byte_str(s, &mut |_, unescaped_byte| {
-                match unescaped_byte {
-                    Ok(c) => buf.push(c),
-                    Err(_) => has_error = true,
-                }
-            });
-            if has_error {
-                return (true, Some(LitKind::Err(i)));
-            }
-            buf.shrink_to_fit();
-            (true, Some(LitKind::ByteStr(Lrc::new(buf))))
-        }
-        token::ByteStrRaw(i, _) => {
-            (true, Some(LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))))
-        }
-    }
-}
-
-fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                      -> Option<ast::LitKind> {
-    debug!("filtered_float_lit: {}, {:?}", data, suffix);
-    let suffix = match suffix {
-        Some(suffix) => suffix,
-        None => return Some(ast::LitKind::FloatUnsuffixed(data)),
-    };
-
-    Some(match &*suffix.as_str() {
-        "f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
-        "f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
-        suf => {
-            err!(diag, |span, diag| {
-                if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
-                    // if it looks like a width, lets try to be helpful.
-                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
-                    diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
-                } else {
-                    let msg = format!("invalid suffix `{}` for float literal", suf);
-                    diag.struct_span_err(span, &msg)
-                        .span_label(span, format!("invalid suffix `{}`", suf))
-                        .help("valid suffixes are `f32` and `f64`")
-                        .emit();
-                }
-            });
-
-            ast::LitKind::FloatUnsuffixed(data)
-        }
-    })
-}
-fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                 -> Option<ast::LitKind> {
-    debug!("float_lit: {:?}, {:?}", s, suffix);
-    // FIXME #2252: bounds checking float literals is deferred until trans
-
-    // Strip underscores without allocating a new String unless necessary.
-    let s2;
-    let s = if s.chars().any(|c| c == '_') {
-        s2 = s.chars().filter(|&c| c != '_').collect::<String>();
-        &s2
-    } else {
-        s
-    };
-
-    filtered_float_lit(Symbol::intern(s), suffix, diag)
-}
-
-fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                   -> Option<ast::LitKind> {
-    // s can only be ascii, byte indexing is fine
-
-    // Strip underscores without allocating a new String unless necessary.
-    let s2;
-    let mut s = if s.chars().any(|c| c == '_') {
-        s2 = s.chars().filter(|&c| c != '_').collect::<String>();
-        &s2
-    } else {
-        s
-    };
-
-    debug!("integer_lit: {}, {:?}", s, suffix);
-
-    let mut base = 10;
-    let orig = s;
-    let mut ty = ast::LitIntType::Unsuffixed;
-
-    if s.starts_with('0') && s.len() > 1 {
-        match s.as_bytes()[1] {
-            b'x' => base = 16,
-            b'o' => base = 8,
-            b'b' => base = 2,
-            _ => { }
-        }
-    }
-
-    // 1f64 and 2f32 etc. are valid float literals.
-    if let Some(suf) = suffix {
-        if looks_like_width_suffix(&['f'], &suf.as_str()) {
-            let err = match base {
-                16 => Some("hexadecimal float literal is not supported"),
-                8 => Some("octal float literal is not supported"),
-                2 => Some("binary float literal is not supported"),
-                _ => None,
-            };
-            if let Some(err) = err {
-                err!(diag, |span, diag| {
-                    diag.struct_span_err(span, err)
-                        .span_label(span, "not supported")
-                        .emit();
-                });
-            }
-            return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
-        }
-    }
-
-    if base != 10 {
-        s = &s[2..];
-    }
-
-    if let Some(suf) = suffix {
-        if suf.as_str().is_empty() {
-            err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
-        }
-        ty = match &*suf.as_str() {
-            "isize" => ast::LitIntType::Signed(ast::IntTy::Isize),
-            "i8"  => ast::LitIntType::Signed(ast::IntTy::I8),
-            "i16" => ast::LitIntType::Signed(ast::IntTy::I16),
-            "i32" => ast::LitIntType::Signed(ast::IntTy::I32),
-            "i64" => ast::LitIntType::Signed(ast::IntTy::I64),
-            "i128" => ast::LitIntType::Signed(ast::IntTy::I128),
-            "usize" => ast::LitIntType::Unsigned(ast::UintTy::Usize),
-            "u8"  => ast::LitIntType::Unsigned(ast::UintTy::U8),
-            "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
-            "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
-            "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
-            "u128" => ast::LitIntType::Unsigned(ast::UintTy::U128),
-            suf => {
-                // i<digits> and u<digits> look like widths, so lets
-                // give an error message along those lines
-                err!(diag, |span, diag| {
-                    if looks_like_width_suffix(&['i', 'u'], suf) {
-                        let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
-                        diag.struct_span_err(span, &msg)
-                            .help("valid widths are 8, 16, 32, 64 and 128")
-                            .emit();
-                    } else {
-                        let msg = format!("invalid suffix `{}` for numeric literal", suf);
-                        diag.struct_span_err(span, &msg)
-                            .span_label(span, format!("invalid suffix `{}`", suf))
-                            .help("the suffix must be one of the integral types \
-                                   (`u32`, `isize`, etc)")
-                            .emit();
-                    }
-                });
-
-                ty
-            }
-        }
-    }
-
-    debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
-           string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);
-
-    Some(match u128::from_str_radix(s, base) {
-        Ok(r) => ast::LitKind::Int(r, ty),
-        Err(_) => {
-            // small bases are lexed as if they were base 10, e.g, the string
-            // might be `0b10201`. This will cause the conversion above to fail,
-            // but these cases have errors in the lexer: we don't want to emit
-            // two errors, and we especially don't want to emit this error since
-            // it isn't necessarily true.
-            let already_errored = base < 10 &&
-                s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
-
-            if !already_errored {
-                err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
-            }
-            ast::LitKind::Int(0, ty)
-        }
-    })
-}
-
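The block removed above is the whole token-to-`LitKind` lowering path: `lit_token` dispatches on the token kind (unescaping byte, char, string and byte-string payloads), while `integer_lit`/`float_lit`/`filtered_float_lit` strip underscores, detect the base prefix and validate the suffix before conversion. Judging by the `literal` import and the `literal::expect_no_suffix` call later in this diff, that logic now lives in the new `parse::literal` module. As a reminder of the shape of the integer path, here is a minimal standalone sketch; the function name, the reduced suffix table and the error strings are illustrative, not the new API:

    // Illustrative sketch only: base/suffix handling in the spirit of the removed
    // `integer_lit`. The lexer hands over the digit part and the suffix separately,
    // e.g. ("0b1010_1010", Some("u8")).
    fn int_lit_sketch(digits: &str, suffix: Option<&str>) -> Result<(u128, &'static str), String> {
        // Underscores are only separators; drop them before radix conversion.
        let s: String = digits.chars().filter(|&c| c != '_').collect();
        // Detect the base from a leading `0x`/`0o`/`0b` prefix, defaulting to 10.
        let (base, body) = match s.get(..2) {
            Some("0x") => (16, &s[2..]),
            Some("0o") => (8, &s[2..]),
            Some("0b") => (2, &s[2..]),
            _ => (10, &s[..]),
        };
        // Map the suffix onto a type (only two suffixes shown here); unknown
        // suffixes are diagnosed, as in the removed code above.
        let ty = match suffix {
            None => "unsuffixed",
            Some("u8") => "u8",
            Some("i32") => "i32",
            Some(other) => return Err(format!("invalid suffix `{}` for numeric literal", other)),
        };
        u128::from_str_radix(body, base)
            .map(|n| (n, ty))
            .map_err(|_| "int literal is too large".to_string())
    }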
 /// A sequence separator.
 pub struct SeqSep {
     /// The separator token.
@@ -674,6 +391,8 @@ mod tests {
     #[test]
     fn string_to_tts_macro () {
         with_globals(|| {
+            use crate::symbol::sym;
+
             let tts: Vec<_> =
                 string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
             let tts: &[TokenTree] = &tts[..];
@@ -686,8 +405,8 @@ mod tests {
                     Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
                     Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
                 )
-                if name_macro_rules.name == "macro_rules"
-                && name_zip.name == "zip" => {
+                if name_macro_rules.name == sym::macro_rules
+                && name_zip.name.as_str() == "zip" => {
                     let tts = &macro_tts.trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                         (
@@ -704,7 +423,7 @@ mod tests {
                                     Some(&TokenTree::Token(_, token::Dollar)),
                                     Some(&TokenTree::Token(_, token::Ident(ident, false))),
                                 )
-                                if first_delim == token::Paren && ident.name == "a" => {},
+                                if first_delim == token::Paren && ident.name.as_str() == "a" => {},
                                 _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                             }
                             let tts = &second_tts.trees().collect::<Vec<_>>();
@@ -714,7 +433,7 @@ mod tests {
                                     Some(&TokenTree::Token(_, token::Dollar)),
                                     Some(&TokenTree::Token(_, token::Ident(ident, false))),
                                 )
-                                if second_delim == token::Paren && ident.name == "a" => {},
+                                if second_delim == token::Paren && ident.name.as_str() == "a" => {},
                                 _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                             }
                         },
@@ -858,20 +577,22 @@ mod tests {
 
     #[test] fn crlf_doc_comments() {
         with_globals(|| {
+            use crate::symbol::sym;
+
             let sess = ParseSess::new(FilePathMapping::empty());
 
             let name_1 = FileName::Custom("crlf_source_1".to_string());
             let source = "/// doc comment\r\nfn foo() {}".to_string();
             let item = parse_item_from_source_str(name_1, source, &sess)
                 .unwrap().unwrap();
-            let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-            assert_eq!(doc, "/// doc comment");
+            let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
+            assert_eq!(doc.as_str(), "/// doc comment");
 
             let name_2 = FileName::Custom("crlf_source_2".to_string());
             let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
             let item = parse_item_from_source_str(name_2, source, &sess)
                 .unwrap().unwrap();
-            let docs = item.attrs.iter().filter(|a| a.path == "doc")
+            let docs = item.attrs.iter().filter(|a| a.path == sym::doc)
                         .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
             let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
             assert_eq!(&docs[..], b);
@@ -879,8 +600,8 @@ mod tests {
             let name_3 = FileName::Custom("clrf_source_3".to_string());
             let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
             let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
-            let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-            assert_eq!(doc, "/** doc comment\n *  with CRLF */");
+            let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
+            assert_eq!(doc.as_str(), "/** doc comment\n *  with CRLF */");
         });
     }
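The test updates above all make the same switch: identifier names are no longer compared against bare string literals. Where a pre-interned symbol exists (`sym::macro_rules`, `sym::doc`) the comparison is symbol-to-symbol; otherwise the string conversion is made explicit with `as_str()`. A small sketch of the pattern inside libsyntax (the helper names are made up for illustration):

    use crate::symbol::{sym, Symbol};

    // Comparing interned symbols is a cheap integer compare...
    fn is_doc_attr(name: Symbol) -> bool {
        name == sym::doc
    }

    // ...and when no pre-interned symbol exists, the conversion is spelled out.
    fn is_named_zip(name: Symbol) -> bool {
        name.as_str() == "zip"
    }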
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index d97d1e2f0f4..24d120376de 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -15,7 +15,7 @@ use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
 use crate::ast::{GenericParam, GenericParamKind};
 use crate::ast::GenericArg;
 use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
-use crate::ast::{Label, Lifetime, Lit, LitKind};
+use crate::ast::{Label, Lifetime};
 use crate::ast::{Local, LocalSource};
 use crate::ast::MacStmtStyle;
 use crate::ast::{Mac, Mac_, MacDelimiter};
@@ -35,7 +35,7 @@ use crate::ast::{RangeEnd, RangeSyntax};
 use crate::{ast, attr};
 use crate::ext::base::DummyResult;
 use crate::source_map::{self, SourceMap, Spanned, respan};
-use crate::parse::{self, SeqSep, classify, token};
+use crate::parse::{SeqSep, classify, literal, token};
 use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::token::DelimToken;
@@ -46,7 +46,7 @@ use crate::ptr::P;
 use crate::parse::PResult;
 use crate::ThinVec;
 use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
-use crate::symbol::{Symbol, keywords};
+use crate::symbol::{keywords, sym, Symbol};
 
 use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
 use rustc_target::spec::abi::{self, Abi};
@@ -104,14 +104,14 @@ pub enum PathStyle {
 }
 
 #[derive(Clone, Copy, PartialEq, Debug)]
-enum SemiColonMode {
+crate enum SemiColonMode {
     Break,
     Ignore,
     Comma,
 }
 
 #[derive(Clone, Copy, PartialEq, Debug)]
-enum BlockMode {
+crate enum BlockMode {
     Break,
     Ignore,
 }
@@ -352,7 +352,7 @@ impl TokenCursor {
         let body = TokenTree::Delimited(
             delim_span,
             token::Bracket,
-            [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
+            [TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
              TokenTree::Token(sp, token::Eq),
              TokenTree::Token(sp, token::Literal(
                 token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
@@ -389,7 +389,7 @@ crate enum TokenType {
 }
 
 impl TokenType {
-    fn to_string(&self) -> String {
+    crate fn to_string(&self) -> String {
         match *self {
             TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
             TokenType::Keyword(kw) => format!("`{}`", kw.name()),
@@ -613,7 +613,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn this_token_descr(&self) -> String {
+    crate fn this_token_descr(&self) -> String {
         if let Some(prefix) = self.token_descr() {
             format!("{} `{}`", prefix, self.this_token_to_string())
         } else {
@@ -621,11 +621,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> {
-        let token_str = pprust::token_to_string(t);
-        Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str)))
-    }
-
     crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
         match self.expect_one_of(&[], &[]) {
             Err(e) => Err(e),
@@ -678,56 +673,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn recover_closing_delimiter(
-        &mut self,
-        tokens: &[token::Token],
-        mut err: DiagnosticBuilder<'a>,
-    ) -> PResult<'a, bool> {
-        let mut pos = None;
-        // we want to use the last closing delim that would apply
-        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
-            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
-                && Some(self.span) > unmatched.unclosed_span
-            {
-                pos = Some(i);
-            }
-        }
-        match pos {
-            Some(pos) => {
-                // Recover and assume that the detected unclosed delimiter was meant for
-                // this location. Emit the diagnostic and act as if the delimiter was
-                // present for the parser's sake.
-
-                 // Don't attempt to recover from this unclosed delimiter more than once.
-                let unmatched = self.unclosed_delims.remove(pos);
-                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
-
-                 // We want to suggest the inclusion of the closing delimiter where it makes
-                // the most sense, which is immediately after the last token:
-                //
-                //  {foo(bar {}}
-                //      -      ^
-                //      |      |
-                //      |      help: `)` may belong here (FIXME: #58270)
-                //      |
-                //      unclosed delimiter
-                if let Some(sp) = unmatched.unclosed_span {
-                    err.span_label(sp, "unclosed delimiter");
-                }
-                err.span_suggestion_short(
-                    self.sess.source_map().next_point(self.prev_span),
-                    &format!("{} may belong here", delim.to_string()),
-                    delim.to_string(),
-                    Applicability::MaybeIncorrect,
-                );
-                err.emit();
-                self.expected_tokens.clear();  // reduce errors
-                Ok(true)
-            }
-            _ => Err(err),
-        }
-    }
-
     /// Expect next token to be edible or inedible token.  If edible,
     /// then consume it; if inedible, then return without consuming
     /// anything.  Signal a fatal error if next token is unexpected.
@@ -1109,43 +1054,7 @@ impl<'a> Parser<'a> {
     }
 
     fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
-        match suffix {
-            None => {/* everything ok */}
-            Some(suf) => {
-                let text = suf.as_str();
-                if text.is_empty() {
-                    self.span_bug(sp, "found empty literal suffix in Some")
-                }
-                let mut err = if kind == "a tuple index" &&
-                    ["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str())
-                {
-                    // #59553: warn instead of reject out of hand to allow the fix to percolate
-                    // through the ecosystem when people fix their macros
-                    let mut err = self.struct_span_warn(
-                        sp,
-                        &format!("suffixes on {} are invalid", kind),
-                    );
-                    err.note(&format!(
-                        "`{}` is *temporarily* accepted on tuple index fields as it was \
-                         incorrectly accepted on stable for a few releases",
-                        text,
-                    ));
-                    err.help(
-                        "on proc macros, you'll want to use `syn::Index::from` or \
-                         `proc_macro::Literal::*_unsuffixed` for code that will desugar \
-                         to tuple field access",
-                    );
-                    err.note(
-                        "for more context, see https://github.com/rust-lang/rust/issues/60210",
-                    );
-                    err
-                } else {
-                    self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
-                };
-                err.span_label(sp, format!("invalid suffix `{}`", text));
-                err.emit();
-            }
-        }
+        literal::expect_no_suffix(sp, &self.sess.span_diagnostic, kind, suffix)
     }
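`expect_no_suffix` keeps its signature but becomes a thin wrapper over `literal::expect_no_suffix`, so the diagnostic lives next to the rest of the literal handling. The special case it carries along is the temporary acceptance of suffixed tuple indices. A source-level illustration of the two outcomes (this is user code, not compiler code):

    // Accidentally accepted on stable for a few releases, so it currently warns
    // rather than errors (see rust-lang/rust#60210 cited in the moved code).
    let pair = (1u32, 2u32);
    let a = pair.0;     // ordinary tuple indexing
    let b = pair.0u32;  // warning: suffixes on a tuple index are invalid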
 
     /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
@@ -1423,7 +1332,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn look_ahead_span(&self, dist: usize) -> Span {
+    crate fn look_ahead_span(&self, dist: usize) -> Span {
         if dist == 0 {
             return self.span
         }
@@ -1452,9 +1361,6 @@ impl<'a> Parser<'a> {
     crate fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
         self.sess.span_diagnostic.struct_span_err(sp, m)
     }
-    fn struct_span_warn<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
-        self.sess.span_diagnostic.struct_span_warn(sp, m)
-    }
     crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
         self.sess.span_diagnostic.span_bug(sp, m)
     }
@@ -1877,7 +1783,7 @@ impl<'a> Parser<'a> {
         Ok(MutTy { ty: t, mutbl: mutbl })
     }
 
-    fn is_named_argument(&mut self) -> bool {
+    fn is_named_argument(&self) -> bool {
         let offset = match self.token {
             token::Interpolated(ref nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
@@ -1925,8 +1831,6 @@ impl<'a> Parser<'a> {
     /// This version of parse arg doesn't necessarily require identifier names.
     fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool,
                          allow_c_variadic: bool) -> PResult<'a, Arg> {
-        maybe_whole!(self, NtArg, |x| x);
-
         if let Ok(Some(_)) = self.parse_self_arg() {
             let mut err = self.struct_span_err(self.prev_span,
                 "unexpected `self` argument in function");
@@ -2069,88 +1973,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Matches `token_lit = LIT_INTEGER | ...`.
-    fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
-        let out = match self.token {
-            token::Interpolated(ref nt) => match **nt {
-                token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
-                    ExprKind::Lit(ref lit) => { lit.node.clone() }
-                    _ => { return self.unexpected_last(&self.token); }
-                },
-                _ => { return self.unexpected_last(&self.token); }
-            },
-            token::Literal(lit, suf) => {
-                let diag = Some((self.span, &self.sess.span_diagnostic));
-                let (suffix_illegal, result) = parse::lit_token(lit, suf, diag);
-
-                if suffix_illegal {
-                    let sp = self.span;
-                    self.expect_no_suffix(sp, &format!("a {}", lit.literal_name()), suf)
-                }
-
-                result.unwrap()
-            }
-            token::Dot if self.look_ahead(1, |t| match t {
-                token::Literal(parse::token::Lit::Integer(_) , _) => true,
-                _ => false,
-            }) => { // recover from `let x = .4;`
-                let lo = self.span;
-                self.bump();
-                if let token::Literal(
-                    parse::token::Lit::Integer(val),
-                    suffix,
-                ) = self.token {
-                    let suffix = suffix.and_then(|s| {
-                        let s = s.as_str();
-                        if s == "f32" {
-                            Some("f32")
-                        } else if s == "f64" {
-                            Some("f64")
-                        } else {
-                            None
-                        }
-                    }).unwrap_or("");
-                    self.bump();
-                    let sp = lo.to(self.prev_span);
-                    let mut err = self.diagnostic()
-                        .struct_span_err(sp, "float literals must have an integer part");
-                    err.span_suggestion(
-                        sp,
-                        "must have an integer part",
-                        format!("0.{}{}", val, suffix),
-                        Applicability::MachineApplicable,
-                    );
-                    err.emit();
-                    return Ok(match suffix {
-                        "f32" => ast::LitKind::Float(val, ast::FloatTy::F32),
-                        "f64" => ast::LitKind::Float(val, ast::FloatTy::F64),
-                        _ => ast::LitKind::FloatUnsuffixed(val),
-                    });
-                } else {
-                    unreachable!();
-                };
-            }
-            _ => { return self.unexpected_last(&self.token); }
-        };
-
-        self.bump();
-        Ok(out)
-    }
-
-    /// Matches `lit = true | false | token_lit`.
-    crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
-        let lo = self.span;
-        let lit = if self.eat_keyword(keywords::True) {
-            LitKind::Bool(true)
-        } else if self.eat_keyword(keywords::False) {
-            LitKind::Bool(false)
-        } else {
-            let lit = self.parse_lit_token()?;
-            lit
-        };
-        Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) })
-    }
-
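`parse_lit_token` and `parse_lit` also move out of the parser. The recovery case embedded in them is easy to miss when reading the new module: a float literal written without an integer part is rewritten instead of rejected. Illustrative input and diagnostic (approximate rendering):

    // User code, not compiler code:
    let x = .4f32;
    // error: float literals must have an integer part
    // help: must have an integer part: `0.4f32`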
     /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
     crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
@@ -2471,27 +2293,27 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
+    crate fn mk_expr(&self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
         P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
     }
 
-    fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
+    fn mk_unary(&self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
         ExprKind::Unary(unop, expr)
     }
 
-    fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
+    fn mk_binary(&self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
         ExprKind::Binary(binop, lhs, rhs)
     }
 
-    fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
+    fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
         ExprKind::Call(f, args)
     }
 
-    fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
+    fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
         ExprKind::Index(expr, idx)
     }
 
-    fn mk_range(&mut self,
+    fn mk_range(&self,
                     start: Option<P<Expr>>,
                     end: Option<P<Expr>>,
                     limits: RangeLimits)
@@ -2503,7 +2325,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn mk_assign_op(&mut self, binop: ast::BinOp,
+    fn mk_assign_op(&self, binop: ast::BinOp,
                         lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
         ExprKind::AssignOp(binop, lhs, rhs)
     }
@@ -2643,13 +2465,12 @@ impl<'a> Parser<'a> {
                     hi = path.span;
                     return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
                 }
-                if self.span.rust_2018() && self.check_keyword(keywords::Async)
-                {
-                    if self.is_async_block() { // check for `async {` and `async move {`
-                        return self.parse_async_block(attrs);
+                if self.span.rust_2018() && self.check_keyword(keywords::Async) {
+                    return if self.is_async_block() { // check for `async {` and `async move {`
+                        self.parse_async_block(attrs)
                     } else {
-                        return self.parse_lambda_expr(attrs);
-                    }
+                        self.parse_lambda_expr(attrs)
+                    };
                 }
                 if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
                     return self.parse_lambda_expr(attrs);
@@ -2758,13 +2579,9 @@ impl<'a> Parser<'a> {
                     db.note("variable declaration using `let` is a statement");
                     return Err(db);
                 } else if self.span.rust_2018() && self.eat_keyword(keywords::Await) {
-                    // FIXME: remove this branch when `await!` is no longer supported
-                    // https://github.com/rust-lang/rust/issues/60610
-                    self.expect(&token::Not)?;
-                    self.expect(&token::OpenDelim(token::Paren))?;
-                    let expr = self.parse_expr()?;
-                    self.expect(&token::CloseDelim(token::Paren))?;
-                    ex = ExprKind::Await(ast::AwaitOrigin::MacroLike, expr);
+                    let (await_hi, e_kind) = self.parse_await_macro_or_alt(lo, self.prev_span)?;
+                    hi = await_hi;
+                    ex = e_kind;
                 } else if self.token.is_path_start() {
                     let path = self.parse_path(PathStyle::Expr)?;
 
@@ -2829,6 +2646,31 @@ impl<'a> Parser<'a> {
         self.maybe_recover_from_bad_qpath(expr, true)
     }
 
+    /// Parse `await!(<expr>)` calls, or alternatively recover from incorrect but reasonable
+    /// alternative syntaxes `await <expr>`, `await? <expr>`, `await(<expr>)` and
+    /// `await { <expr> }`.
+    fn parse_await_macro_or_alt(
+        &mut self,
+        lo: Span,
+        await_sp: Span,
+    ) -> PResult<'a, (Span, ExprKind)> {
+        if self.token == token::Not {
+            // Handle correct `await!(<expr>)`.
+            // FIXME: make this an error when `await!` is no longer supported
+            // https://github.com/rust-lang/rust/issues/60610
+            self.expect(&token::Not)?;
+            self.expect(&token::OpenDelim(token::Paren))?;
+            let expr = self.parse_expr().map_err(|mut err| {
+                err.span_label(await_sp, "while parsing this await macro call");
+                err
+            })?;
+            self.expect(&token::CloseDelim(token::Paren))?;
+            Ok((self.prev_span, ExprKind::Await(ast::AwaitOrigin::MacroLike, expr)))
+        } else { // Handle `await <expr>`.
+            self.parse_incorrect_await_syntax(lo, await_sp)
+        }
+    }
+
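The extracted helper keeps the still-supported `await!(<expr>)` form on the happy path and funnels every other `await`-flavoured spelling into `parse_incorrect_await_syntax` recovery. A source-level illustration of the forms involved (2018 edition; user code, not compiler code):

    async fn get() -> u8 { 1 }

    async fn caller() -> u8 {
        let a = await!(get());       // parsed by the branch above (issue #60610)
        // let b = await get();      // incorrect syntax, recovered with a suggestion
        // let c = await { get() };  // likewise routed through recovery
        a
    }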
     fn maybe_parse_struct_expr(
         &mut self,
         lo: Span,
@@ -2977,10 +2819,13 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a block or unsafe block.
-    fn parse_block_expr(&mut self, opt_label: Option<Label>,
-                            lo: Span, blk_mode: BlockCheckMode,
-                            outer_attrs: ThinVec<Attribute>)
-                            -> PResult<'a, P<Expr>> {
+    crate fn parse_block_expr(
+        &mut self,
+        opt_label: Option<Label>,
+        lo: Span,
+        blk_mode: BlockCheckMode,
+        outer_attrs: ThinVec<Attribute>,
+    ) -> PResult<'a, P<Expr>> {
         self.expect(&token::OpenDelim(token::Brace))?;
 
         let mut attrs = outer_attrs;
@@ -3041,6 +2886,7 @@ impl<'a> Parser<'a> {
                 ExprKind::Await(ast::AwaitOrigin::FieldLike, self_arg),
                 ThinVec::new(),
             );
+            self.recover_from_await_method_call();
             return Ok(await_expr);
         }
         let segment = self.parse_path_segment(PathStyle::Expr)?;
@@ -3279,23 +3125,6 @@ impl<'a> Parser<'a> {
         return Ok(e);
     }
 
-    fn recover_seq_parse_error(
-        &mut self,
-        delim: token::DelimToken,
-        lo: Span,
-        result: PResult<'a, P<Expr>>,
-    ) -> P<Expr> {
-        match result {
-            Ok(x) => x,
-            Err(mut err) => {
-                err.emit();
-                // recover from parse error
-                self.consume_block(delim);
-                self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new())
-            }
-        }
-    }
-
     crate fn process_potential_macro_variable(&mut self) {
         let (token, span) = match self.token {
             token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
@@ -3574,7 +3403,8 @@ impl<'a> Parser<'a> {
             } else {
                 self.restrictions
             };
-            if op.precedence() < min_prec {
+            let prec = op.precedence();
+            if prec < min_prec {
                 break;
             }
             // Check for deprecated `...` syntax
@@ -3615,8 +3445,7 @@ impl<'a> Parser<'a> {
                 // We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=` The other
                 // two variants are handled with `parse_prefix_range_expr` call above.
                 let rhs = if self.is_at_start_of_range_notation_rhs() {
-                    Some(self.parse_assoc_expr_with(op.precedence() + 1,
-                                                    LhsExpr::NotYetParsed)?)
+                    Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
                 } else {
                     None
                 };
@@ -3636,28 +3465,18 @@ impl<'a> Parser<'a> {
                 break
             }
 
-            let rhs = match op.fixity() {
-                Fixity::Right => self.with_res(
-                    restrictions - Restrictions::STMT_EXPR,
-                    |this| {
-                        this.parse_assoc_expr_with(op.precedence(),
-                            LhsExpr::NotYetParsed)
-                }),
-                Fixity::Left => self.with_res(
-                    restrictions - Restrictions::STMT_EXPR,
-                    |this| {
-                        this.parse_assoc_expr_with(op.precedence() + 1,
-                            LhsExpr::NotYetParsed)
-                }),
+            let fixity = op.fixity();
+            let prec_adjustment = match fixity {
+                Fixity::Right => 0,
+                Fixity::Left => 1,
                 // We currently have no non-associative operators that are not handled above by
                 // the special cases. The code is here only for future convenience.
-                Fixity::None => self.with_res(
-                    restrictions - Restrictions::STMT_EXPR,
-                    |this| {
-                        this.parse_assoc_expr_with(op.precedence() + 1,
-                            LhsExpr::NotYetParsed)
-                }),
-            }?;
+                Fixity::None => 1,
+            };
+            let rhs = self.with_res(
+                restrictions - Restrictions::STMT_EXPR,
+                |this| this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
+            )?;
 
             // Make sure that the span of the parent node is larger than the span of lhs and rhs,
             // including the attributes.
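The three near-identical `with_res` closures collapse into one because the only thing fixity ever changed was the minimum precedence passed to the recursive call: right-associative operators recurse at the same precedence, everything else one level higher. A standalone sketch of that precedence-climbing rule, with an illustrative operator set rather than the parser's `AssocOp`:

    // Single-character, single-digit tokens keep the sketch short.
    #[derive(Clone, Copy)]
    enum Fixity { Left, Right }

    fn op_info(op: char) -> Option<(u32, Fixity)> {
        match op {
            '+' | '-' => Some((1, Fixity::Left)),
            '*'       => Some((2, Fixity::Left)),
            '^'       => Some((3, Fixity::Right)),
            _ => None,
        }
    }

    fn parse_expr(tokens: &mut std::iter::Peekable<std::slice::Iter<'_, char>>, min_prec: u32) -> i64 {
        let mut lhs = i64::from(*tokens.next().unwrap() as u8 - b'0');
        while let Some(&&op) = tokens.peek() {
            let (prec, fixity) = match op_info(op) {
                Some(info) if info.0 >= min_prec => info,
                _ => break,
            };
            tokens.next();
            // Right-associative: recurse at the same precedence (adjustment 0);
            // left-associative: recurse one level higher (adjustment 1).
            let adjustment = match fixity { Fixity::Right => 0, Fixity::Left => 1 };
            let rhs = parse_expr(tokens, prec + adjustment);
            lhs = match op {
                '+' => lhs + rhs,
                '-' => lhs - rhs,
                '*' => lhs * rhs,
                '^' => lhs.pow(rhs as u32),
                _ => unreachable!(),
            };
        }
        lhs
    }

    // With this rule "2^3^2" groups as 2^(3^2) = 512, while "8-2-1" groups as (8-2)-1 = 5.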
@@ -3678,8 +3497,7 @@ impl<'a> Parser<'a> {
                     let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
                     self.mk_expr(span, binary, ThinVec::new())
                 }
-                AssocOp::Assign =>
-                    self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()),
+                AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()),
                 AssocOp::ObsoleteInPlace =>
                     self.mk_expr(span, ExprKind::ObsoleteInPlace(lhs, rhs), ThinVec::new()),
                 AssocOp::AssignOp(k) => {
@@ -3703,63 +3521,11 @@ impl<'a> Parser<'a> {
                 }
             };
 
-            if op.fixity() == Fixity::None { break }
+            if let Fixity::None = fixity { break }
         }
         Ok(lhs)
     }
 
-    fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
-        self.token.is_ident() &&
-            if let ast::ExprKind::Path(..) = node { true } else { false } &&
-            !self.token.is_reserved_ident() &&           // v `foo:bar(baz)`
-            self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) ||
-            self.look_ahead(1, |t| t == &token::Lt) &&     // `foo:bar<baz`
-            self.look_ahead(2, |t| t.is_ident()) ||
-            self.look_ahead(1, |t| t == &token::Colon) &&  // `foo:bar:baz`
-            self.look_ahead(2, |t| t.is_ident()) ||
-            self.look_ahead(1, |t| t == &token::ModSep) &&  // `foo:bar::baz`
-            self.look_ahead(2, |t| t.is_ident())
-    }
-
-    fn bad_type_ascription(
-        &self,
-        err: &mut DiagnosticBuilder<'a>,
-        lhs_span: Span,
-        cur_op_span: Span,
-        next_sp: Span,
-        maybe_path: bool,
-    ) {
-        err.span_label(self.span, "expecting a type here because of type ascription");
-        let cm = self.sess.source_map();
-        let next_pos = cm.lookup_char_pos(next_sp.lo());
-        let op_pos = cm.lookup_char_pos(cur_op_span.hi());
-        if op_pos.line != next_pos.line {
-            err.span_suggestion(
-                cur_op_span,
-                "try using a semicolon",
-                ";".to_string(),
-                Applicability::MaybeIncorrect,
-            );
-        } else {
-            if maybe_path {
-                err.span_suggestion(
-                    cur_op_span,
-                    "maybe you meant to write a path separator here",
-                    "::".to_string(),
-                    Applicability::MaybeIncorrect,
-                );
-            } else {
-                err.note("type ascription is a nightly-only feature that lets \
-                          you annotate an expression with a type: `<expr>: <type>`");
-                err.span_note(
-                    lhs_span,
-                    "this expression expects an ascribed type after the colon",
-                );
-                err.help("this might be indicative of a syntax error elsewhere");
-            }
-        }
-    }
-
     fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
                            expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind)
                            -> PResult<'a, P<Expr>> {
@@ -3840,7 +3606,7 @@ impl<'a> Parser<'a> {
     /// Produce an error if comparison operators are chained (RFC #558).
     /// We only need to check lhs, not rhs, because all comparison ops
     /// have same precedence and are left-associative
-    fn check_no_chained_comparison(&mut self, lhs: &Expr, outer_op: &AssocOp) {
+    fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) {
         debug_assert!(outer_op.is_comparison(),
                       "check_no_chained_comparison: {:?} is not comparison",
                       outer_op);
@@ -4179,8 +3945,6 @@ impl<'a> Parser<'a> {
     }
 
     crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
-        maybe_whole!(self, NtArm, |x| x);
-
         let attrs = self.parse_outer_attributes()?;
         let pats = self.parse_pats()?;
         let guard = if self.eat_keyword(keywords::If) {
@@ -5043,92 +4807,6 @@ impl<'a> Parser<'a> {
         Ok(self.parse_stmt_(true))
     }
 
-    // Eat tokens until we can be relatively sure we reached the end of the
-    // statement. This is something of a best-effort heuristic.
-    //
-    // We terminate when we find an unmatched `}` (without consuming it).
-    fn recover_stmt(&mut self) {
-        self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
-    }
-
-    // If `break_on_semi` is `Break`, then we will stop consuming tokens after
-    // finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
-    // approximate - it can mean we break too early due to macros, but that
-    // should only lead to sub-optimal recovery, not inaccurate parsing).
-    //
-    // If `break_on_block` is `Break`, then we will stop consuming tokens
-    // after finding (and consuming) a brace-delimited block.
-    fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) {
-        let mut brace_depth = 0;
-        let mut bracket_depth = 0;
-        let mut in_block = false;
-        debug!("recover_stmt_ enter loop (semi={:?}, block={:?})",
-               break_on_semi, break_on_block);
-        loop {
-            debug!("recover_stmt_ loop {:?}", self.token);
-            match self.token {
-                token::OpenDelim(token::DelimToken::Brace) => {
-                    brace_depth += 1;
-                    self.bump();
-                    if break_on_block == BlockMode::Break &&
-                       brace_depth == 1 &&
-                       bracket_depth == 0 {
-                        in_block = true;
-                    }
-                }
-                token::OpenDelim(token::DelimToken::Bracket) => {
-                    bracket_depth += 1;
-                    self.bump();
-                }
-                token::CloseDelim(token::DelimToken::Brace) => {
-                    if brace_depth == 0 {
-                        debug!("recover_stmt_ return - close delim {:?}", self.token);
-                        break;
-                    }
-                    brace_depth -= 1;
-                    self.bump();
-                    if in_block && bracket_depth == 0 && brace_depth == 0 {
-                        debug!("recover_stmt_ return - block end {:?}", self.token);
-                        break;
-                    }
-                }
-                token::CloseDelim(token::DelimToken::Bracket) => {
-                    bracket_depth -= 1;
-                    if bracket_depth < 0 {
-                        bracket_depth = 0;
-                    }
-                    self.bump();
-                }
-                token::Eof => {
-                    debug!("recover_stmt_ return - Eof");
-                    break;
-                }
-                token::Semi => {
-                    self.bump();
-                    if break_on_semi == SemiColonMode::Break &&
-                       brace_depth == 0 &&
-                       bracket_depth == 0 {
-                        debug!("recover_stmt_ return - Semi");
-                        break;
-                    }
-                }
-                token::Comma => {
-                    if break_on_semi == SemiColonMode::Comma &&
-                       brace_depth == 0 &&
-                       bracket_depth == 0 {
-                        debug!("recover_stmt_ return - Semi");
-                        break;
-                    } else {
-                        self.bump();
-                    }
-                }
-                _ => {
-                    self.bump()
-                }
-            }
-        }
-    }
-
     fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
         self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
             e.emit();
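`recover_stmt` and `recover_stmt_` move verbatim into `parse/diagnostics.rs` (which is presumably why `SemiColonMode` and `BlockMode` became crate-visible earlier in this diff). The heuristic itself is unchanged: skip tokens while tracking brace and bracket depth, stopping at a `;` at depth zero, at the end of a brace-delimited block, or at an unmatched `}`. A stripped-down sketch of that skip loop (the token type is a stand-in, and only the semicolon/brace cases are shown):

    enum Tok { OpenBrace, CloseBrace, Semi, Other, Eof }

    // Skip tokens until a `;` at depth zero or an unmatched `}`.
    fn skip_to_stmt_end(tokens: &[Tok]) -> usize {
        let mut depth = 0usize;
        for (i, t) in tokens.iter().enumerate() {
            match t {
                Tok::OpenBrace => depth += 1,
                Tok::CloseBrace if depth == 0 => return i, // leave the unmatched `}` unconsumed
                Tok::CloseBrace => depth -= 1,
                Tok::Semi if depth == 0 => return i + 1,   // consume the terminating `;`
                Tok::Eof => return i,
                _ => {}
            }
        }
        tokens.len()
    }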
@@ -5137,7 +4815,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn is_async_block(&mut self) -> bool {
+    fn is_async_block(&self) -> bool {
         self.token.is_keyword(keywords::Async) &&
         (
             ( // `async move {`
@@ -5149,19 +4827,19 @@ impl<'a> Parser<'a> {
         )
     }
 
-    fn is_async_fn(&mut self) -> bool {
+    fn is_async_fn(&self) -> bool {
         self.token.is_keyword(keywords::Async) &&
             self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
     }
 
-    fn is_do_catch_block(&mut self) -> bool {
+    fn is_do_catch_block(&self) -> bool {
         self.token.is_keyword(keywords::Do) &&
         self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) &&
         self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
         !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
 
-    fn is_try_block(&mut self) -> bool {
+    fn is_try_block(&self) -> bool {
         self.token.is_keyword(keywords::Try) &&
         self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
         self.span.rust_2018() &&
@@ -5183,7 +4861,7 @@ impl<'a> Parser<'a> {
         self.look_ahead(1, |t| t.is_keyword(keywords::Type))
     }
 
-    fn is_auto_trait_item(&mut self) -> bool {
+    fn is_auto_trait_item(&self) -> bool {
         // auto trait
         (self.token.is_keyword(keywords::Auto)
             && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
@@ -5225,7 +4903,7 @@ impl<'a> Parser<'a> {
 
                 (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
             }
-            token::Ident(ident, _) if ident.name == "macro_rules" &&
+            token::Ident(ident, _) if ident.name == sym::macro_rules &&
                                    self.look_ahead(1, |t| *t == token::Not) => {
                 let prev_span = self.prev_span;
                 self.complain_if_pub_macro(&vis.node, prev_span);
@@ -5445,7 +5123,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Checks if this expression is a successfully parsed statement.
-    fn expr_is_complete(&mut self, e: &Expr) -> bool {
+    fn expr_is_complete(&self, e: &Expr) -> bool {
         self.restrictions.contains(Restrictions::STMT_EXPR) &&
             !classify::expr_requires_semi_to_be_stmt(e)
     }
@@ -5915,8 +5593,6 @@ impl<'a> Parser<'a> {
     ///                  | ( < lifetimes , typaramseq ( , )? > )
     /// where   typaramseq = ( typaram ) | ( typaram , typaramseq )
     fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
-        maybe_whole!(self, NtGenerics, |x| x);
-
         let span_lo = self.span;
         if self.eat_lt() {
             let params = self.parse_generic_params()?;
@@ -6169,8 +5845,6 @@ impl<'a> Parser<'a> {
     /// where T : Trait<U, V> + 'b, 'a : 'b
     /// ```
     fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
-        maybe_whole!(self, NtWhereClause, |x| x);
-
         let mut where_clause = WhereClause {
             id: ast::DUMMY_NODE_ID,
             predicates: Vec::new(),
@@ -6517,7 +6191,7 @@ impl<'a> Parser<'a> {
         Ok((id, generics))
     }
 
-    fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
+    fn mk_item(&self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
                attrs: Vec<Attribute>) -> P<Item> {
         P(Item {
             ident,
@@ -6549,7 +6223,7 @@ impl<'a> Parser<'a> {
 
     /// Returns `true` if we are looking at `const ID`
     /// (returns `false` for things like `const fn`, etc.).
-    fn is_const_item(&mut self) -> bool {
+    fn is_const_item(&self) -> bool {
         self.token.is_keyword(keywords::Const) &&
             !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
             !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
@@ -6657,7 +6331,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn complain_if_pub_macro(&mut self, vis: &VisibilityKind, sp: Span) {
+    fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) {
         match *vis {
             VisibilityKind::Inherited => {}
             _ => {
@@ -6686,7 +6360,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn missing_assoc_item_kind_err(&mut self, item_type: &str, prev_span: Span)
+    fn missing_assoc_item_kind_err(&self, item_type: &str, prev_span: Span)
                                    -> DiagnosticBuilder<'a>
     {
         let expected_kinds = if item_type == "extern" {
@@ -7036,26 +6710,6 @@ impl<'a> Parser<'a> {
         Ok((class_name, ItemKind::Union(vdata, generics), None))
     }
 
-    fn consume_block(&mut self, delim: token::DelimToken) {
-        let mut brace_depth = 0;
-        loop {
-            if self.eat(&token::OpenDelim(delim)) {
-                brace_depth += 1;
-            } else if self.eat(&token::CloseDelim(delim)) {
-                if brace_depth == 0 {
-                    return;
-                } else {
-                    brace_depth -= 1;
-                    continue;
-                }
-            } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
-                return;
-            } else {
-                self.bump();
-            }
-        }
-    }
-
     fn parse_record_struct_body(
         &mut self,
     ) -> PResult<'a, (Vec<StructField>, /* recovered */ bool)> {
@@ -7357,7 +7011,8 @@ impl<'a> Parser<'a> {
                     let attr = Attribute {
                         id: attr::mk_attr_id(),
                         style: ast::AttrStyle::Outer,
-                        path: ast::Path::from_ident(Ident::from_str("warn_directory_ownership")),
+                        path: ast::Path::from_ident(
+                            Ident::with_empty_ctxt(sym::warn_directory_ownership)),
                         tokens: TokenStream::empty(),
                         is_sugared_doc: false,
                         span: syntax_pos::DUMMY_SP,
@@ -7389,7 +7044,7 @@ impl<'a> Parser<'a> {
     }
 
     fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
-        if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") {
+        if let Some(path) = attr::first_attr_value_str_by_name(attrs, sym::path) {
             self.directory.path.to_mut().push(&path.as_str());
             self.directory.ownership = DirectoryOwnership::Owned { relative: None };
         } else {
@@ -7409,7 +7064,7 @@ impl<'a> Parser<'a> {
     }
 
     pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
-        if let Some(s) = attr::first_attr_value_str_by_name(attrs, "path") {
+        if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) {
             let s = s.as_str();
 
             // On windows, the base path might have the form
@@ -8753,21 +8408,6 @@ impl<'a> Parser<'a> {
         ).emit();
     }
 
-    /// Recover from `pub` keyword in places where it seems _reasonable_ but isn't valid.
-    fn eat_bad_pub(&mut self) {
-        if self.token.is_keyword(keywords::Pub) {
-            match self.parse_visibility(false) {
-                Ok(vis) => {
-                    let mut err = self.diagnostic()
-                        .struct_span_err(vis.span, "unnecessary visibility qualifier");
-                    err.span_label(vis.span, "`pub` not permitted here");
-                    err.emit();
-                }
-                Err(mut err) => err.emit(),
-            }
-        }
-    }
-
     /// When lowering a `async fn` to the HIR, we need to move all of the arguments of the function
     /// into the generated closure so that they are dropped when the future is polled and not when
     /// it is created.
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index fd7a39c576d..068fc41c87a 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -19,7 +19,7 @@ use log::info;
 use std::fmt;
 use std::mem;
 #[cfg(target_arch = "x86_64")]
-use rustc_data_structures::static_assert;
+use rustc_data_structures::static_assert_size;
 use rustc_data_structures::sync::Lrc;
 
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@@ -61,6 +61,7 @@ impl DelimToken {
 
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum Lit {
+    Bool(ast::Name), // AST only, must never appear in a `Token`
     Byte(ast::Name),
     Char(ast::Name),
     Err(ast::Name),
@@ -72,9 +73,13 @@ pub enum Lit {
     ByteStrRaw(ast::Name, u16), /* raw byte str delimited by n hash symbols */
 }
 
+#[cfg(target_arch = "x86_64")]
+static_assert_size!(Lit, 8);
+
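`static_assert_size!` replaces the ad-hoc `static_assert!` call so the size check states the expected size directly, and `Lit` gets the same guard now that it carries the AST-only `Bool` variant. A hedged sketch of what such a size assertion can expand to (the real macro lives in `rustc_data_structures` and may differ):

    // Illustrative expansion only: an array type whose length must equal the
    // measured size, so a mismatch becomes a compile-time type error.
    macro_rules! static_assert_size_sketch {
        ($ty:ty, $size:expr) => {
            const _: [(); $size] = [(); std::mem::size_of::<$ty>()];
        };
    }

    static_assert_size_sketch!(u64, 8);    // compiles
    // static_assert_size_sketch!(u64, 4); // fails to compile: length mismatch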
 impl Lit {
     crate fn literal_name(&self) -> &'static str {
         match *self {
+            Bool(_) => panic!("literal token contains `Lit::Bool`"),
             Byte(_) => "byte literal",
             Char(_) => "char literal",
             Err(_) => "invalid literal",
@@ -85,6 +90,13 @@ impl Lit {
         }
     }
 
+    crate fn may_have_suffix(&self) -> bool {
+        match *self {
+            Integer(..) | Float(..) => true,
+            _ => false,
+        }
+    }
+
     // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
@@ -208,7 +220,7 @@ pub enum Token {
 
 // `Token` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
-static_assert!(MEM_SIZE_OF_STATEMENT: mem::size_of::<Token>() == 16);
+static_assert_size!(Token, 16);
 
 impl Token {
     /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
@@ -585,14 +597,12 @@ pub enum Nonterminal {
     NtPath(ast::Path),
     NtVis(ast::Visibility),
     NtTT(TokenTree),
-    // These are not exposed to macros, but are used by quasiquote.
-    NtArm(ast::Arm),
-    NtImplItem(ast::ImplItem),
+    // Used only for passing items to proc macro attributes (they are not
+    // strictly necessary for that, `Annotatable` can be converted into
+    // tokens directly, but doing that naively regresses pretty-printing).
     NtTraitItem(ast::TraitItem),
+    NtImplItem(ast::ImplItem),
     NtForeignItem(ast::ForeignItem),
-    NtGenerics(ast::Generics),
-    NtWhereClause(ast::WhereClause),
-    NtArg(ast::Arg),
 }
 
 impl PartialEq for Nonterminal {
@@ -625,13 +635,9 @@ impl fmt::Debug for Nonterminal {
             NtMeta(..) => f.pad("NtMeta(..)"),
             NtPath(..) => f.pad("NtPath(..)"),
             NtTT(..) => f.pad("NtTT(..)"),
-            NtArm(..) => f.pad("NtArm(..)"),
             NtImplItem(..) => f.pad("NtImplItem(..)"),
             NtTraitItem(..) => f.pad("NtTraitItem(..)"),
             NtForeignItem(..) => f.pad("NtForeignItem(..)"),
-            NtGenerics(..) => f.pad("NtGenerics(..)"),
-            NtWhereClause(..) => f.pad("NtWhereClause(..)"),
-            NtArg(..) => f.pad("NtArg(..)"),
             NtVis(..) => f.pad("NtVis(..)"),
             NtLifetime(..) => f.pad("NtLifetime(..)"),
         }
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 682621d40ab..cd86d94f4b8 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -13,17 +13,15 @@ use crate::print::pp::{self, Breaks};
 use crate::print::pp::Breaks::{Consistent, Inconsistent};
 use crate::ptr::P;
 use crate::std_inject;
-use crate::symbol::keywords;
+use crate::symbol::{keywords, sym};
 use crate::tokenstream::{self, TokenStream, TokenTree};
 
 use rustc_target::spec::abi::{self, Abi};
 use syntax_pos::{self, BytePos};
 use syntax_pos::{DUMMY_SP, FileName};
 
-use std::ascii;
 use std::borrow::Cow;
 use std::io::{self, Write, Read};
-use std::iter::Peekable;
 use std::vec;
 
 pub enum AnnNode<'a> {
@@ -49,8 +47,7 @@ impl PpAnn for NoAnn {}
 pub struct State<'a> {
     pub s: pp::Printer<'a>,
     cm: Option<&'a SourceMap>,
-    comments: Option<Vec<comments::Comment> >,
-    literals: Peekable<vec::IntoIter<comments::Literal>>,
+    comments: Option<Vec<comments::Comment>>,
     cur_cmnt: usize,
     boxes: Vec<pp::Breaks>,
     ann: &'a (dyn PpAnn+'a),
@@ -62,7 +59,6 @@ fn rust_printer<'a>(writer: Box<dyn Write+'a>, ann: &'a dyn PpAnn) -> State<'a>
         s: pp::mk_printer(writer, DEFAULT_COLUMNS),
         cm: None,
         comments: None,
-        literals: vec![].into_iter().peekable(),
         cur_cmnt: 0,
         boxes: Vec::new(),
         ann,
@@ -75,8 +71,7 @@ pub const INDENT_UNIT: usize = 4;
 pub const DEFAULT_COLUMNS: usize = 78;
 
 /// Requires you to pass an input filename and reader so that
-/// it can scan the input text for comments and literals to
-/// copy forward.
+/// it can scan the input text for comments to copy forward.
 pub fn print_crate<'a>(cm: &'a SourceMap,
                        sess: &ParseSess,
                        krate: &ast::Crate,
@@ -94,13 +89,14 @@ pub fn print_crate<'a>(cm: &'a SourceMap,
         // of the feature gate, so we fake them up here.
 
         // #![feature(prelude_import)]
-        let pi_nested = attr::mk_nested_word_item(ast::Ident::from_str("prelude_import"));
-        let list = attr::mk_list_item(DUMMY_SP, ast::Ident::from_str("feature"), vec![pi_nested]);
+        let pi_nested = attr::mk_nested_word_item(ast::Ident::with_empty_ctxt(sym::prelude_import));
+        let list = attr::mk_list_item(
+            DUMMY_SP, ast::Ident::with_empty_ctxt(sym::feature), vec![pi_nested]);
         let fake_attr = attr::mk_attr_inner(DUMMY_SP, attr::mk_attr_id(), list);
         s.print_attribute(&fake_attr)?;
 
         // #![no_std]
-        let no_std_meta = attr::mk_word_item(ast::Ident::from_str("no_std"));
+        let no_std_meta = attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::no_std));
         let fake_attr = attr::mk_attr_inner(DUMMY_SP, attr::mk_attr_id(), no_std_meta);
         s.print_attribute(&fake_attr)?;
     }
@@ -118,36 +114,23 @@ impl<'a> State<'a> {
                           out: Box<dyn Write+'a>,
                           ann: &'a dyn PpAnn,
                           is_expanded: bool) -> State<'a> {
-        let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input);
-
-        State::new(
-            cm,
-            out,
-            ann,
-            Some(cmnts),
-            // If the code is post expansion, don't use the table of
-            // literals, since it doesn't correspond with the literals
-            // in the AST anymore.
-            if is_expanded { None } else { Some(lits) },
-            is_expanded
-        )
+        let comments = comments::gather_comments(sess, filename, input);
+        State::new(cm, out, ann, Some(comments), is_expanded)
     }
 
     pub fn new(cm: &'a SourceMap,
                out: Box<dyn Write+'a>,
                ann: &'a dyn PpAnn,
                comments: Option<Vec<comments::Comment>>,
-               literals: Option<Vec<comments::Literal>>,
                is_expanded: bool) -> State<'a> {
         State {
             s: pp::mk_printer(out, DEFAULT_COLUMNS),
             cm: Some(cm),
             comments,
-            literals: literals.unwrap_or_default().into_iter().peekable(),
             cur_cmnt: 0,
             boxes: Vec::new(),
             ann,
-            is_expanded: is_expanded
+            is_expanded,
         }
     }
 }
@@ -180,6 +163,31 @@ fn binop_to_string(op: BinOpToken) -> &'static str {
     }
 }
 
+pub fn literal_to_string(lit: token::Lit, suffix: Option<ast::Name>) -> String {
+    let mut out = match lit {
+        token::Byte(b)           => format!("b'{}'", b),
+        token::Char(c)           => format!("'{}'", c),
+        token::Err(c)            => format!("'{}'", c),
+        token::Bool(c)           |
+        token::Float(c)          |
+        token::Integer(c)        => c.to_string(),
+        token::Str_(s)           => format!("\"{}\"", s),
+        token::StrRaw(s, n)      => format!("r{delim}\"{string}\"{delim}",
+                                            delim="#".repeat(n as usize),
+                                            string=s),
+        token::ByteStr(v)        => format!("b\"{}\"", v),
+        token::ByteStrRaw(s, n)  => format!("br{delim}\"{string}\"{delim}",
+                                            delim="#".repeat(n as usize),
+                                            string=s),
+    };
+
+    if let Some(suffix) = suffix {
+        out.push_str(&suffix.as_str())
+    }
+
+    out
+}
+
 pub fn token_to_string(tok: &Token) -> String {
     match *tok {
         token::Eq                   => "=".to_string(),
@@ -223,29 +231,7 @@ pub fn token_to_string(tok: &Token) -> String {
         token::SingleQuote          => "'".to_string(),
 
         /* Literals */
-        token::Literal(lit, suf) => {
-            let mut out = match lit {
-                token::Byte(b)           => format!("b'{}'", b),
-                token::Char(c)           => format!("'{}'", c),
-                token::Err(c)            => format!("'{}'", c),
-                token::Float(c)          |
-                token::Integer(c)        => c.to_string(),
-                token::Str_(s)           => format!("\"{}\"", s),
-                token::StrRaw(s, n)      => format!("r{delim}\"{string}\"{delim}",
-                                                    delim="#".repeat(n as usize),
-                                                    string=s),
-                token::ByteStr(v)         => format!("b\"{}\"", v),
-                token::ByteStrRaw(s, n)   => format!("br{delim}\"{string}\"{delim}",
-                                                    delim="#".repeat(n as usize),
-                                                    string=s),
-            };
-
-            if let Some(s) = suf {
-                out.push_str(&s.as_str())
-            }
-
-            out
-        }
+        token::Literal(lit, suf) => literal_to_string(lit, suf),
 
         /* Name components */
         token::Ident(s, false)      => s.to_string(),
@@ -278,12 +264,8 @@ pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
         token::NtLifetime(e)        => ident_to_string(e),
         token::NtLiteral(ref e)     => expr_to_string(e),
         token::NtTT(ref tree)       => tt_to_string(tree.clone()),
-        token::NtArm(ref e)         => arm_to_string(e),
         token::NtImplItem(ref e)    => impl_item_to_string(e),
         token::NtTraitItem(ref e)   => trait_item_to_string(e),
-        token::NtGenerics(ref e)    => generic_params_to_string(&e.params),
-        token::NtWhereClause(ref e) => where_clause_to_string(e),
-        token::NtArg(ref e)         => arg_to_string(e),
         token::NtVis(ref e)         => vis_to_string(e),
         token::NtForeignItem(ref e) => foreign_item_to_string(e),
     }
@@ -438,8 +420,6 @@ pub trait PrintState<'a> {
     fn boxes(&mut self) -> &mut Vec<pp::Breaks>;
     fn comments(&mut self) -> &mut Option<Vec<comments::Comment>>;
     fn cur_cmnt(&mut self) -> &mut usize;
-    fn cur_lit(&mut self) -> Option<&comments::Literal>;
-    fn bump_lit(&mut self) -> Option<comments::Literal>;
 
     fn word_space<S: Into<Cow<'static, str>>>(&mut self, w: S) -> io::Result<()> {
         self.writer().word(w)?;
@@ -504,21 +484,6 @@ pub trait PrintState<'a> {
         self.end()
     }
 
-    fn next_lit(&mut self, pos: BytePos) -> Option<comments::Literal> {
-        while let Some(ltrl) = self.cur_lit().cloned() {
-            if ltrl.pos > pos { break; }
-
-            // we don't need the value here since we're forced to clone cur_lit
-            // due to lack of NLL.
-            self.bump_lit();
-            if ltrl.pos == pos {
-                return Some(ltrl);
-            }
-        }
-
-        None
-    }
-
     fn maybe_print_comment(&mut self, pos: BytePos) -> io::Result<()> {
         while let Some(ref cmnt) = self.next_comment() {
             if cmnt.pos < pos {
@@ -606,60 +571,7 @@ pub trait PrintState<'a> {
 
     fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
         self.maybe_print_comment(lit.span.lo())?;
-        if let Some(ltrl) = self.next_lit(lit.span.lo()) {
-            return self.writer().word(ltrl.lit.clone());
-        }
-        match lit.node {
-            ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
-            ast::LitKind::Err(st) => {
-                let st = st.as_str().escape_debug().to_string();
-                let mut res = String::with_capacity(st.len() + 2);
-                res.push('\'');
-                res.push_str(&st);
-                res.push('\'');
-                self.writer().word(res)
-            }
-            ast::LitKind::Byte(byte) => {
-                let mut res = String::from("b'");
-                res.extend(ascii::escape_default(byte).map(|c| c as char));
-                res.push('\'');
-                self.writer().word(res)
-            }
-            ast::LitKind::Char(ch) => {
-                let mut res = String::from("'");
-                res.extend(ch.escape_default());
-                res.push('\'');
-                self.writer().word(res)
-            }
-            ast::LitKind::Int(i, t) => {
-                match t {
-                    ast::LitIntType::Signed(st) => {
-                        self.writer().word(st.val_to_string(i as i128))
-                    }
-                    ast::LitIntType::Unsigned(ut) => {
-                        self.writer().word(ut.val_to_string(i))
-                    }
-                    ast::LitIntType::Unsuffixed => {
-                        self.writer().word(i.to_string())
-                    }
-                }
-            }
-            ast::LitKind::Float(ref f, t) => {
-                self.writer().word(format!("{}{}", &f, t.ty_to_string()))
-            }
-            ast::LitKind::FloatUnsuffixed(ref f) => self.writer().word(f.as_str().to_string()),
-            ast::LitKind::Bool(val) => {
-                if val { self.writer().word("true") } else { self.writer().word("false") }
-            }
-            ast::LitKind::ByteStr(ref v) => {
-                let mut escaped: String = String::new();
-                for &ch in v.iter() {
-                    escaped.extend(ascii::escape_default(ch)
-                                         .map(|c| c as char));
-                }
-                self.writer().word(format!("b\"{}\"", escaped))
-            }
-        }
+        self.writer().word(literal_to_string(lit.token, lit.suffix))
     }
 
     fn print_string(&mut self, st: &str,
@@ -880,14 +792,6 @@ impl<'a> PrintState<'a> for State<'a> {
     fn cur_cmnt(&mut self) -> &mut usize {
         &mut self.cur_cmnt
     }
-
-    fn cur_lit(&mut self) -> Option<&comments::Literal> {
-        self.literals.peek()
-    }
-
-    fn bump_lit(&mut self) -> Option<comments::Literal> {
-        self.literals.next()
-    }
 }
 
 impl<'a> State<'a> {
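
The pprust.rs hunks above fold the removed per-`LitKind` printing and the old literal-table lookup into a single `literal_to_string` over the literal token, which `print_literal` then reuses via `lit.token` and `lit.suffix`. The following is a minimal standalone sketch of that same "render the token, then append the optional suffix" shape; `LitToken` and `render_literal` are made-up names for illustration, not the compiler's `token::Lit` API.

    #[allow(dead_code)]
    enum LitToken {
        Bool(String),
        Integer(String),
        Float(String),
        Char(char),
        Str(String),
        // contents plus the number of `#` delimiters of a raw string
        RawStr(String, usize),
    }

    fn render_literal(lit: &LitToken, suffix: Option<&str>) -> String {
        let mut out = match lit {
            // Keyword-like and numeric literals print their symbol verbatim.
            LitToken::Bool(s) | LitToken::Integer(s) | LitToken::Float(s) => s.clone(),
            LitToken::Char(c) => format!("'{}'", c),
            LitToken::Str(s) => format!("\"{}\"", s),
            LitToken::RawStr(s, n) => {
                let delim = "#".repeat(*n);
                format!("r{}\"{}\"{}", delim, s, delim)
            }
        };
        // Suffixes such as `u8` or `f32` follow the literal with no separator.
        if let Some(suffix) = suffix {
            out.push_str(suffix);
        }
        out
    }

    fn main() {
        assert_eq!(render_literal(&LitToken::Integer("1".into()), Some("u8")), "1u8");
        assert_eq!(render_literal(&LitToken::RawStr("hi".into(), 1), None), "r#\"hi\"#");
    }

Rendering from the token rather than from a side table of scanned literals is what lets the `next_lit`/`cur_lit`/`bump_lit` machinery above be deleted.
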
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index b9758bd655c..1be7986ad53 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -2,7 +2,7 @@ use crate::ast;
 use crate::attr;
 use crate::edition::Edition;
 use crate::ext::hygiene::{Mark, SyntaxContext};
-use crate::symbol::{Symbol, keywords};
+use crate::symbol::{Ident, Symbol, keywords, sym};
 use crate::source_map::{ExpnInfo, MacroAttribute, dummy_spanned, hygiene, respan};
 use crate::ptr::P;
 use crate::tokenstream::TokenStream;
@@ -35,6 +35,9 @@ pub fn injected_crate_name() -> Option<&'static str> {
 }
 
 thread_local! {
+    // A `Symbol` might make more sense here, but it doesn't work, probably for
+    // reasons relating to the use of thread-local storage for the Symbol
+    // interner.
     static INJECTED_CRATE_NAME: Cell<Option<&'static str>> = Cell::new(None);
 }
 
@@ -46,10 +49,10 @@ pub fn maybe_inject_crates_ref(
     let rust_2018 = edition >= Edition::Edition2018;
 
     // the first name in this list is the crate name of the crate with the prelude
-    let names: &[&str] = if attr::contains_name(&krate.attrs, "no_core") {
+    let names: &[&str] = if attr::contains_name(&krate.attrs, sym::no_core) {
         return krate;
-    } else if attr::contains_name(&krate.attrs, "no_std") {
-        if attr::contains_name(&krate.attrs, "compiler_builtins") {
+    } else if attr::contains_name(&krate.attrs, sym::no_std) {
+        if attr::contains_name(&krate.attrs, sym::compiler_builtins) {
             &["core"]
         } else {
             &["core", "compiler_builtins"]
@@ -60,26 +63,25 @@ pub fn maybe_inject_crates_ref(
 
     // .rev() to preserve ordering above in combination with insert(0, ...)
     let alt_std_name = alt_std_name.map(Symbol::intern);
-    for orig_name in names.iter().rev() {
-        let orig_name = Symbol::intern(orig_name);
-        let mut rename = orig_name;
+    for orig_name_str in names.iter().rev() {
         // HACK(eddyb) gensym the injected crates on the Rust 2018 edition,
         // so they don't accidentally interfere with the new import paths.
-        if rust_2018 {
-            rename = orig_name.gensymed();
-        }
-        let orig_name = if rename != orig_name {
-            Some(orig_name)
+        let orig_name_sym = Symbol::intern(orig_name_str);
+        let orig_name_ident = Ident::with_empty_ctxt(orig_name_sym);
+        let (rename, orig_name) = if rust_2018 {
+            (orig_name_ident.gensym(), Some(orig_name_sym))
         } else {
-            None
+            (orig_name_ident, None)
         };
         krate.module.items.insert(0, P(ast::Item {
-            attrs: vec![attr::mk_attr_outer(DUMMY_SP,
-                                            attr::mk_attr_id(),
-                                            attr::mk_word_item(ast::Ident::from_str("macro_use")))],
+            attrs: vec![attr::mk_attr_outer(
+                DUMMY_SP,
+                attr::mk_attr_id(),
+                attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::macro_use))
+            )],
             vis: dummy_spanned(ast::VisibilityKind::Inherited),
             node: ast::ItemKind::ExternCrate(alt_std_name.or(orig_name)),
-            ident: ast::Ident::with_empty_ctxt(rename),
+            ident: rename,
             id: ast::DUMMY_NODE_ID,
             span: DUMMY_SP,
             tokens: None,
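
The std_inject.rs hunk adds a comment noting that the per-thread `INJECTED_CRATE_NAME` cell holds a `&'static str` rather than a `Symbol`, since symbols are backed by a thread-local interner. A self-contained sketch of that storage pattern follows; the leaking setter is an illustrative assumption, not how libsyntax actually obtains the `'static` string.

    use std::cell::Cell;

    thread_local! {
        // One cached name per thread; `&'static str` avoids tying the cell to
        // any interner that lives in thread-local storage.
        static INJECTED_CRATE_NAME: Cell<Option<&'static str>> = Cell::new(None);
    }

    fn set_injected_crate_name(name: &str) {
        // Leak a copy so the reference is 'static for the rest of the process.
        let leaked: &'static str = Box::leak(name.to_string().into_boxed_str());
        INJECTED_CRATE_NAME.with(|cell| cell.set(Some(leaked)));
    }

    fn injected_crate_name() -> Option<&'static str> {
        INJECTED_CRATE_NAME.with(|cell| cell.get())
    }

    fn main() {
        set_injected_crate_name("std");
        assert_eq!(injected_crate_name(), Some("std"));
    }
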
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 03d0eff266e..3dc7aad9459 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -29,7 +29,7 @@ use crate::parse::{token, ParseSess};
 use crate::print::pprust;
 use crate::ast::{self, Ident};
 use crate::ptr::P;
-use crate::symbol::{self, Symbol, keywords};
+use crate::symbol::{self, Symbol, keywords, sym};
 use crate::ThinVec;
 
 struct Test {
@@ -65,8 +65,7 @@ pub fn modify_for_testing(sess: &ParseSess,
     // unconditional, so that the attribute is still marked as used in
     // non-test builds.
     let reexport_test_harness_main =
-        attr::first_attr_value_str_by_name(&krate.attrs,
-                                           "reexport_test_harness_main");
+        attr::first_attr_value_str_by_name(&krate.attrs, sym::reexport_test_harness_main);
 
     // Do this here so that the test_runner crate attribute gets marked as used
     // even in non-test builds
@@ -172,7 +171,7 @@ impl MutVisitor for EntryPointCleaner {
             EntryPointType::MainAttr |
             EntryPointType::Start =>
                 item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| {
-                    let allow_ident = Ident::from_str("allow");
+                    let allow_ident = Ident::with_empty_ctxt(sym::allow);
                     let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code"));
                     let allow_dead_code_item = attr::mk_list_item(DUMMY_SP, allow_ident,
                                                                   vec![dc_nested]);
@@ -185,7 +184,7 @@ impl MutVisitor for EntryPointCleaner {
                         ident,
                         attrs: attrs.into_iter()
                             .filter(|attr| {
-                                !attr.check_name("main") && !attr.check_name("start")
+                                !attr.check_name(sym::main) && !attr.check_name(sym::start)
                             })
                             .chain(iter::once(allow_dead_code))
                             .collect(),
@@ -216,7 +215,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>,
                    tests: Vec<Ident>,
                    tested_submods: Vec<(Ident, Ident)>)
                    -> (P<ast::Item>, Ident) {
-    let super_ = Ident::from_str("super");
+    let super_ = Ident::with_empty_ctxt(keywords::Super.name());
 
     let items = tests.into_iter().map(|r| {
         cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public),
@@ -233,11 +232,11 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>,
         items,
     };
 
-    let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports"));
+    let name = Ident::from_str("__test_reexports").gensym();
     let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
     cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
     let it = cx.ext_cx.monotonic_expander().flat_map_item(P(ast::Item {
-        ident: sym,
+        ident: name,
         attrs: Vec::new(),
         id: ast::DUMMY_NODE_ID,
         node: ast::ItemKind::Mod(reexport_mod),
@@ -246,7 +245,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>,
         tokens: None,
     })).pop().unwrap();
 
-    (it, sym)
+    (it, name)
 }
 
 /// Crawl over the crate, inserting test reexports and the test main function
@@ -273,7 +272,8 @@ fn generate_test_harness(sess: &ParseSess,
         test_cases: Vec::new(),
         reexport_test_harness_main,
         // N.B., doesn't consider the value of `--crate-name` passed on the command line.
-        is_libtest: attr::find_crate_name(&krate.attrs).map(|s| s == "test").unwrap_or(false),
+        is_libtest: attr::find_crate_name(&krate.attrs)
+            .map(|s| s == sym::test).unwrap_or(false),
         toplevel_reexport: None,
         ctxt: SyntaxContext::empty().apply_mark(mark),
         features,
@@ -373,9 +373,10 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
                            main_body);
 
     // Honor the reexport_test_harness_main attribute
-    let main_id = Ident::new(
-        cx.reexport_test_harness_main.unwrap_or(Symbol::gensym("main")),
-        sp);
+    let main_id = match cx.reexport_test_harness_main {
+        Some(sym) => Ident::new(sym, sp),
+        None => Ident::from_str_and_span("main", sp).gensym(),
+    };
 
     P(ast::Item {
         ident: main_id,
@@ -428,11 +429,11 @@ fn visible_path(cx: &TestCtxt<'_>, path: &[Ident]) -> Vec<Ident>{
 }
 
 fn is_test_case(i: &ast::Item) -> bool {
-    attr::contains_name(&i.attrs, "rustc_test_marker")
+    attr::contains_name(&i.attrs, sym::rustc_test_marker)
 }
 
 fn get_test_runner(sd: &errors::Handler, krate: &ast::Crate) -> Option<ast::Path> {
-    let test_attr = attr::find_by_name(&krate.attrs, "test_runner")?;
+    let test_attr = attr::find_by_name(&krate.attrs, sym::test_runner)?;
     test_attr.meta_item_list().map(|meta_list| {
         if meta_list.len() != 1 {
             sd.span_fatal(test_attr.span,
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 93b5ecadd14..3cb16c30a50 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -21,7 +21,7 @@ use crate::print::pprust;
 
 use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
 #[cfg(target_arch = "x86_64")]
-use rustc_data_structures::static_assert;
+use rustc_data_structures::static_assert_size;
 use rustc_data_structures::sync::Lrc;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
 use smallvec::{SmallVec, smallvec};
@@ -158,7 +158,7 @@ pub type TreeAndJoint = (TokenTree, IsJoint);
 
 // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
-static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 8);
+static_assert_size!(TokenStream, 8);
 
 #[derive(Clone, Copy, Debug, PartialEq)]
 pub enum IsJoint {
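
The tokenstream.rs hunk (like the ast.rs one earlier in this commit) switches from the older `static_assert!` invocation to `static_assert_size!(TokenStream, 8)`. As a rough idea of how such a compile-time size check can work, here is a hypothetical reconstruction, not the actual `rustc_data_structures` macro: the declared and computed array lengths must agree for the constant to type-check, so any growth of the type becomes a compile error.

    macro_rules! static_assert_size {
        ($ty:ty, $size:expr) => {
            // Length mismatch between the annotation and the value is rejected
            // by the type checker, turning a size regression into a build failure.
            const _: [(); $size] = [(); ::std::mem::size_of::<$ty>()];
        };
    }

    static_assert_size!(u64, 8);   // compiles
    // static_assert_size!(u64, 4); // would fail: mismatched array lengths

    fn main() {}
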