Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs                |   2
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs         |  21
-rw-r--r--  src/libsyntax/parse/literal.rs                  | 192
-rw-r--r--  src/libsyntax/parse/mod.rs                      | 270
-rw-r--r--  src/libsyntax/parse/parser.rs                   | 423
-rw-r--r--  src/libsyntax/parse/parser/attr.rs (renamed from src/libsyntax/parse/attr.rs)               |  44
-rw-r--r--  src/libsyntax/parse/parser/diagnostics.rs (renamed from src/libsyntax/parse/diagnostics.rs) | 327
-rw-r--r--  src/libsyntax/parse/parser/expr.rs              | 199
-rw-r--r--  src/libsyntax/parse/parser/generics.rs          |   2
-rw-r--r--  src/libsyntax/parse/parser/item.rs              | 948
-rw-r--r--  src/libsyntax/parse/parser/module.rs            |  12
-rw-r--r--  src/libsyntax/parse/parser/pat.rs               |  68
-rw-r--r--  src/libsyntax/parse/parser/path.rs              |  19
-rw-r--r--  src/libsyntax/parse/parser/stmt.rs              |  13
-rw-r--r--  src/libsyntax/parse/parser/ty.rs                |  36
-rw-r--r--  src/libsyntax/parse/token.rs                    | 153
16 files changed, 1427 insertions(+), 1302 deletions(-)
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index ac3feadce3a..e6dc9a4c134 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1,5 +1,5 @@
-use crate::parse::ParseSess;
 use crate::parse::token::{self, Token, TokenKind};
+use crate::sess::ParseSess;
 use crate::symbol::{sym, Symbol};
 use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
 
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index e5ba7e45309..b4dd23c9f9b 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,3 +1,4 @@
+use rustc_data_structures::fx::FxHashMap;
 use syntax_pos::Span;
 
 use crate::print::pprust::token_to_string;
@@ -16,6 +17,7 @@ impl<'a> StringReader<'a> {
             unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
             last_unclosed_found_span: None,
+            last_delim_empty_block_spans: FxHashMap::default()
         };
         let res = tt_reader.parse_all_token_trees();
         (res, tt_reader.unmatched_braces)
@@ -34,6 +36,7 @@ struct TokenTreesReader<'a> {
     /// Used only for error recovery when arriving to EOF with mismatched braces.
     matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
     last_unclosed_found_span: Option<Span>,
+    last_delim_empty_block_spans: FxHashMap<token::DelimToken, Span>
 }
 
 impl<'a> TokenTreesReader<'a> {
@@ -121,13 +124,20 @@ impl<'a> TokenTreesReader<'a> {
                     // Correct delimiter.
                     token::CloseDelim(d) if d == delim => {
                         let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
+                        let close_brace_span = self.token.span;
+
+                        if tts.is_empty() {
+                            let empty_block_span = open_brace_span.to(close_brace_span);
+                            self.last_delim_empty_block_spans.insert(delim, empty_block_span);
+                        }
+
                         if self.open_braces.len() == 0 {
                             // Clear up these spans to avoid suggesting them as we've found
                             // properly matched delimiters so far for an entire block.
                             self.matching_delim_spans.clear();
                         } else {
                             self.matching_delim_spans.push(
-                                (open_brace, open_brace_span, self.token.span),
+                                (open_brace, open_brace_span, close_brace_span),
                             );
                         }
                         // Parse the close delimiter.
@@ -193,13 +203,20 @@ impl<'a> TokenTreesReader<'a> {
                     tts.into()
                 ).into())
             },
-            token::CloseDelim(_) => {
+            token::CloseDelim(delim) => {
                 // An unexpected closing delimiter (i.e., there is no
                 // matching opening delimiter).
                 let token_str = token_to_string(&self.token);
                 let msg = format!("unexpected close delimiter: `{}`", token_str);
                 let mut err = self.string_reader.sess.span_diagnostic
                     .struct_span_err(self.token.span, &msg);
+
+                if let Some(span) = self.last_delim_empty_block_spans.remove(&delim) {
+                    err.span_label(
+                        span,
+                        "this block is empty, you might have not meant to close it"
+                    );
+                }
                 err.span_label(self.token.span, "unexpected close delimiter");
                 Err(err)
             },
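
The tokentrees.rs change above keeps, per delimiter kind, the span of the most recently seen empty block, so that a stray closing delimiter can be reported together with the empty block that probably caused it. Below is a minimal standalone sketch of that bookkeeping; the types (Tok, Delim) and the index-based "spans" are invented for illustration and are not rustc's:

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    enum Delim { Paren, Brace }

    #[derive(Clone, Copy)]
    enum Tok { Open(Delim), Close(Delim), Other }

    fn main() {
        use Delim::*;
        use Tok::*;
        // Roughly `fn f ( ) { } }` -- the final `}` has no matching `{`.
        let tokens = [Other, Other, Open(Paren), Close(Paren), Open(Brace), Close(Brace), Close(Brace)];

        // (delimiter kind, position of its opener, tokens seen inside it so far)
        let mut open_stack: Vec<(Delim, usize, usize)> = Vec::new();
        // Span of the most recent *empty* block, per delimiter kind.
        let mut last_empty_block: HashMap<Delim, (usize, usize)> = HashMap::new();

        for (pos, tok) in tokens.iter().copied().enumerate() {
            match tok {
                Open(d) => open_stack.push((d, pos, 0)),
                Close(d) => match open_stack.pop() {
                    Some((open_d, open_pos, inner)) if open_d == d => {
                        if inner == 0 {
                            // Remember this empty `{}` / `()` for later diagnostics.
                            last_empty_block.insert(d, (open_pos, pos));
                        }
                        if let Some(parent) = open_stack.last_mut() {
                            parent.2 += 1;
                        }
                    }
                    _ => {
                        // No matching opener: point at the last empty block, if any.
                        print!("error: unexpected close delimiter at token {}", pos);
                        if let Some((lo, hi)) = last_empty_block.remove(&d) {
                            print!(
                                " (the block spanning tokens {}..{} is empty; \
                                 it may not have been meant to be closed)",
                                lo, hi
                            );
                        }
                        println!();
                    }
                },
                Other => {
                    if let Some(parent) = open_stack.last_mut() {
                        parent.2 += 1;
                    }
                }
            }
        }
    }
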
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index fcd5b2782fd..7952e293a53 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -1,14 +1,10 @@
 //! Code related to parsing literals.
 
 use crate::ast::{self, Lit, LitKind};
-use crate::parse::parser::Parser;
-use crate::parse::PResult;
-use crate::parse::token::{self, Token, TokenKind};
-use crate::print::pprust;
+use crate::parse::token::{self, Token};
 use crate::symbol::{kw, sym, Symbol};
-use crate::tokenstream::{TokenStream, TokenTree};
+use crate::tokenstream::TokenTree;
 
-use errors::{Applicability, Handler};
 use log::debug;
 use rustc_data_structures::sync::Lrc;
 use syntax_pos::Span;
@@ -28,72 +24,6 @@ crate enum LitError {
     IntTooLarge,
 }
 
-impl LitError {
-    fn report(&self, diag: &Handler, lit: token::Lit, span: Span) {
-        let token::Lit { kind, suffix, .. } = lit;
-        match *self {
-            // `NotLiteral` is not an error by itself, so we don't report
-            // it and give the parser opportunity to try something else.
-            LitError::NotLiteral => {}
-            // `LexerError` *is* an error, but it was already reported
-            // by lexer, so here we don't report it the second time.
-            LitError::LexerError => {}
-            LitError::InvalidSuffix => {
-                expect_no_suffix(
-                    diag, span, &format!("{} {} literal", kind.article(), kind.descr()), suffix
-                );
-            }
-            LitError::InvalidIntSuffix => {
-                let suf = suffix.expect("suffix error with no suffix").as_str();
-                if looks_like_width_suffix(&['i', 'u'], &suf) {
-                    // If it looks like a width, try to be helpful.
-                    let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
-                    diag.struct_span_err(span, &msg)
-                        .help("valid widths are 8, 16, 32, 64 and 128")
-                        .emit();
-                } else {
-                    let msg = format!("invalid suffix `{}` for integer literal", suf);
-                    diag.struct_span_err(span, &msg)
-                        .span_label(span, format!("invalid suffix `{}`", suf))
-                        .help("the suffix must be one of the integral types (`u32`, `isize`, etc)")
-                        .emit();
-                }
-            }
-            LitError::InvalidFloatSuffix => {
-                let suf = suffix.expect("suffix error with no suffix").as_str();
-                if looks_like_width_suffix(&['f'], &suf) {
-                    // If it looks like a width, try to be helpful.
-                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
-                    diag.struct_span_err(span, &msg)
-                        .help("valid widths are 32 and 64")
-                        .emit();
-                } else {
-                    let msg = format!("invalid suffix `{}` for float literal", suf);
-                    diag.struct_span_err(span, &msg)
-                        .span_label(span, format!("invalid suffix `{}`", suf))
-                        .help("valid suffixes are `f32` and `f64`")
-                        .emit();
-                }
-            }
-            LitError::NonDecimalFloat(base) => {
-                let descr = match base {
-                    16 => "hexadecimal",
-                    8 => "octal",
-                    2 => "binary",
-                    _ => unreachable!(),
-                };
-                diag.struct_span_err(span, &format!("{} float literal is not supported", descr))
-                    .span_label(span, "not supported")
-                    .emit();
-            }
-            LitError::IntTooLarge => {
-                diag.struct_span_err(span, "integer literal is too large")
-                    .emit();
-            }
-        }
-    }
-}
-
 impl LitKind {
     /// Converts literal token into a semantic literal.
     fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {
@@ -204,7 +134,7 @@ impl LitKind {
         let (kind, symbol, suffix) = match *self {
             LitKind::Str(symbol, ast::StrStyle::Cooked) => {
                 // Don't re-intern unless the escaped string is different.
-                let s = &symbol.as_str();
+                let s: &str = &symbol.as_str();
                 let escaped = s.escape_default().to_string();
                 let symbol = if escaped == *s { symbol } else { Symbol::intern(&escaped) };
                 (token::Str, symbol, None)
@@ -254,7 +184,7 @@ impl LitKind {
 
 impl Lit {
     /// Converts literal token into an AST literal.
-    fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
+    crate fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
         Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span })
     }
 
@@ -286,109 +216,16 @@ impl Lit {
         Lit { token: kind.to_lit_token(), kind, span }
     }
 
-    /// Losslessly convert an AST literal into a token stream.
-    crate fn tokens(&self) -> TokenStream {
+    /// Losslessly convert an AST literal into a token tree.
+    crate fn token_tree(&self) -> TokenTree {
         let token = match self.token.kind {
             token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
-        TokenTree::token(token, self.span).into()
+        TokenTree::token(token, self.span)
     }
 }
 
-impl<'a> Parser<'a> {
-    /// Matches `lit = true | false | token_lit`.
-    crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
-        let mut recovered = None;
-        if self.token == token::Dot {
-            // Attempt to recover `.4` as `0.4`.
-            recovered = self.look_ahead(1, |next_token| {
-                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
-                        = next_token.kind {
-                    if self.token.span.hi() == next_token.span.lo() {
-                        let s = String::from("0.") + &symbol.as_str();
-                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
-                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
-                    }
-                }
-                None
-            });
-            if let Some(token) = &recovered {
-                self.bump();
-                self.diagnostic()
-                    .struct_span_err(token.span, "float literals must have an integer part")
-                    .span_suggestion(
-                        token.span,
-                        "must have an integer part",
-                        pprust::token_to_string(token),
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
-            }
-        }
-
-        let token = recovered.as_ref().unwrap_or(&self.token);
-        match Lit::from_token(token) {
-            Ok(lit) => {
-                self.bump();
-                Ok(lit)
-            }
-            Err(LitError::NotLiteral) => {
-                let msg = format!("unexpected token: {}", self.this_token_descr());
-                Err(self.span_fatal(token.span, &msg))
-            }
-            Err(err) => {
-                let (lit, span) = (token.expect_lit(), token.span);
-                self.bump();
-                err.report(&self.sess.span_diagnostic, lit, span);
-                // Pack possible quotes and prefixes from the original literal into
-                // the error literal's symbol so they can be pretty-printed faithfully.
-                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
-                let symbol = Symbol::intern(&suffixless_lit.to_string());
-                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
-                Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
-            }
-        }
-    }
-}
-
-crate fn expect_no_suffix(diag: &Handler, sp: Span, kind: &str, suffix: Option<Symbol>) {
-    if let Some(suf) = suffix {
-        let mut err = if kind == "a tuple index" &&
-                         [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) {
-            // #59553: warn instead of reject out of hand to allow the fix to percolate
-            // through the ecosystem when people fix their macros
-            let mut err = diag.struct_span_warn(
-                sp,
-                &format!("suffixes on {} are invalid", kind),
-            );
-            err.note(&format!(
-                "`{}` is *temporarily* accepted on tuple index fields as it was \
-                    incorrectly accepted on stable for a few releases",
-                suf,
-            ));
-            err.help(
-                "on proc macros, you'll want to use `syn::Index::from` or \
-                    `proc_macro::Literal::*_unsuffixed` for code that will desugar \
-                    to tuple field access",
-            );
-            err.note(
-                "for more context, see https://github.com/rust-lang/rust/issues/60210",
-            );
-            err
-        } else {
-            diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
-        };
-        err.span_label(sp, format!("invalid suffix `{}`", suf));
-        err.emit();
-    }
-}
-
-// Checks if `s` looks like i32 or u1234 etc.
-fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
-    s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
-}
-
 fn strip_underscores(symbol: Symbol) -> Symbol {
     // Do not allocate a new string unless necessary.
     let s = symbol.as_str();
@@ -426,15 +263,12 @@ fn integer_lit(symbol: Symbol, suffix: Option<Symbol>) -> Result<LitKind, LitErr
     let symbol = strip_underscores(symbol);
     let s = symbol.as_str();
 
-    let mut base = 10;
-    if s.len() > 1 && s.as_bytes()[0] == b'0' {
-        match s.as_bytes()[1] {
-            b'x' => base = 16,
-            b'o' => base = 8,
-            b'b' => base = 2,
-            _ => {}
-        }
-    }
+    let base = match s.as_bytes() {
+        [b'0', b'x', ..] => 16,
+        [b'0', b'o', ..] => 8,
+        [b'0', b'b', ..] => 2,
+        _ => 10,
+    };
 
     let ty = match suffix {
         Some(suf) => match suf {
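
The integer_lit hunk above replaces manual byte indexing with a slice pattern over the first two bytes; a bare "0" is shorter than the pattern and still falls through to base 10, matching the old `s.len() > 1` check. A minimal standalone version of that match (the function name is made up for the example):

    fn int_base(digits: &str) -> u32 {
        // Mirrors the new slice-pattern match in `integer_lit`.
        match digits.as_bytes() {
            [b'0', b'x', ..] => 16,
            [b'0', b'o', ..] => 8,
            [b'0', b'b', ..] => 2,
            _ => 10,
        }
    }

    fn main() {
        assert_eq!(int_base("0xff"), 16);
        assert_eq!(int_base("0o77"), 8);
        assert_eq!(int_base("0b11"), 2);
        assert_eq!(int_base("1234"), 10);
        assert_eq!(int_base("0"), 10); // a bare `0` has no base prefix
    }
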
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index fa4c1043122..e6b794a6a99 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -1,40 +1,33 @@
 //! The main parser interface.
 
-use crate::ast::{self, CrateConfig, NodeId};
-use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
-use crate::source_map::{SourceMap, FilePathMapping};
-use crate::feature_gate::UnstableFeatures;
-use crate::parse::parser::Parser;
-use crate::parse::parser::emit_unclosed_delims;
-use crate::parse::token::TokenKind;
-use crate::tokenstream::{TokenStream, TokenTree};
+use crate::ast;
+use crate::parse::parser::{Parser, emit_unclosed_delims};
+use crate::parse::token::Nonterminal;
+use crate::tokenstream::{self, TokenStream, TokenTree};
 use crate::print::pprust;
-use crate::symbol::Symbol;
+use crate::sess::ParseSess;
 
-use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
-use rustc_data_structures::fx::{FxHashSet, FxHashMap};
+use errors::{FatalError, Level, Diagnostic, DiagnosticBuilder};
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
-use rustc_data_structures::sync::{Lrc, Lock, Once};
-use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use syntax_pos::edition::Edition;
-use syntax_pos::hygiene::ExpnId;
+use rustc_data_structures::sync::Lrc;
+use syntax_pos::{Span, SourceFile, FileName};
 
 use std::borrow::Cow;
-use std::path::{Path, PathBuf};
+use std::path::Path;
 use std::str;
 
+use log::info;
+
 #[cfg(test)]
 mod tests;
 
 #[macro_use]
 pub mod parser;
-pub mod attr;
 pub mod lexer;
 pub mod token;
 
 crate mod classify;
-crate mod diagnostics;
 crate mod literal;
 crate mod unescape_error_reporting;
 
@@ -45,110 +38,6 @@ pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(PResult<'_, bool>, 16);
 
-/// Collected spans during parsing for places where a certain feature was
-/// used and should be feature gated accordingly in `check_crate`.
-#[derive(Default)]
-pub struct GatedSpans {
-    /// Spans collected for gating `let_chains`, e.g. `if a && let b = c {}`.
-    pub let_chains: Lock<Vec<Span>>,
-    /// Spans collected for gating `async_closure`, e.g. `async || ..`.
-    pub async_closure: Lock<Vec<Span>>,
-    /// Spans collected for gating `yield e?` expressions (`generators` gate).
-    pub yields: Lock<Vec<Span>>,
-    /// Spans collected for gating `or_patterns`, e.g. `Some(Foo | Bar)`.
-    pub or_patterns: Lock<Vec<Span>>,
-}
-
-/// Info about a parsing session.
-pub struct ParseSess {
-    pub span_diagnostic: Handler,
-    pub unstable_features: UnstableFeatures,
-    pub config: CrateConfig,
-    pub edition: Edition,
-    pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
-    /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
-    pub raw_identifier_spans: Lock<Vec<Span>>,
-    /// Used to determine and report recursive module inclusions.
-    included_mod_stack: Lock<Vec<PathBuf>>,
-    source_map: Lrc<SourceMap>,
-    pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
-    /// Contains the spans of block expressions that could have been incomplete based on the
-    /// operation token that followed it, but that the parser cannot identify without further
-    /// analysis.
-    pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>,
-    pub injected_crate_name: Once<Symbol>,
-    pub gated_spans: GatedSpans,
-}
-
-impl ParseSess {
-    pub fn new(file_path_mapping: FilePathMapping) -> Self {
-        let cm = Lrc::new(SourceMap::new(file_path_mapping));
-        let handler = Handler::with_tty_emitter(
-            ColorConfig::Auto,
-            true,
-            None,
-            Some(cm.clone()),
-        );
-        ParseSess::with_span_handler(handler, cm)
-    }
-
-    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> Self {
-        Self {
-            span_diagnostic: handler,
-            unstable_features: UnstableFeatures::from_environment(),
-            config: FxHashSet::default(),
-            edition: ExpnId::root().expn_data().edition,
-            missing_fragment_specifiers: Lock::new(FxHashSet::default()),
-            raw_identifier_spans: Lock::new(Vec::new()),
-            included_mod_stack: Lock::new(vec![]),
-            source_map,
-            buffered_lints: Lock::new(vec![]),
-            ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
-            injected_crate_name: Once::new(),
-            gated_spans: GatedSpans::default(),
-        }
-    }
-
-    #[inline]
-    pub fn source_map(&self) -> &SourceMap {
-        &self.source_map
-    }
-
-    pub fn buffer_lint<S: Into<MultiSpan>>(&self,
-        lint_id: BufferedEarlyLintId,
-        span: S,
-        id: NodeId,
-        msg: &str,
-    ) {
-        self.buffered_lints.with_lock(|buffered_lints| {
-            buffered_lints.push(BufferedEarlyLint{
-                span: span.into(),
-                id,
-                msg: msg.into(),
-                lint_id,
-            });
-        });
-    }
-
-    /// Extend an error with a suggestion to wrap an expression with parentheses to allow the
-    /// parser to continue parsing the following operation as part of the same expression.
-    pub fn expr_parentheses_needed(
-        &self,
-        err: &mut DiagnosticBuilder<'_>,
-        span: Span,
-        alt_snippet: Option<String>,
-    ) {
-        if let Some(snippet) = self.source_map().span_to_snippet(span).ok().or(alt_snippet) {
-            err.span_suggestion(
-                span,
-                "parentheses are required to parse this as an expression",
-                format!("({})", snippet),
-                Applicability::MachineApplicable,
-            );
-        }
-    }
-}
-
 #[derive(Clone)]
 pub struct Directory<'a> {
     pub path: Cow<'a, Path>,
@@ -382,26 +271,131 @@ pub fn stream_to_parser_with_base_dir<'a>(
     Parser::new(sess, stream, Some(base_dir), true, false, None)
 }
 
-/// A sequence separator.
-pub struct SeqSep {
-    /// The separator token.
-    pub sep: Option<TokenKind>,
-    /// `true` if a trailing separator is allowed.
-    pub trailing_sep_allowed: bool,
+// NOTE(Centril): The following probably shouldn't be here but it acknowledges the
+// fact that architecturally, we are using parsing (read on below to understand why).
+
+pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream {
+    // A `Nonterminal` is often a parsed AST item. At this point we now
+    // need to convert the parsed AST to an actual token stream, e.g.
+    // un-parse it basically.
+    //
+    // Unfortunately there's not really a great way to do that in a
+    // guaranteed lossless fashion right now. The fallback here is to just
+    // stringify the AST node and reparse it, but this loses all span
+    // information.
+    //
+    // As a result, some AST nodes are annotated with the token stream they
+    // came from. Here we attempt to extract these lossless token streams
+    // before we fall back to the stringification.
+    let tokens = match *nt {
+        Nonterminal::NtItem(ref item) => {
+            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+        }
+        Nonterminal::NtTraitItem(ref item) => {
+            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+        }
+        Nonterminal::NtImplItem(ref item) => {
+            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+        }
+        Nonterminal::NtIdent(ident, is_raw) => {
+            Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
+        }
+        Nonterminal::NtLifetime(ident) => {
+            Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
+        }
+        Nonterminal::NtTT(ref tt) => {
+            Some(tt.clone().into())
+        }
+        _ => None,
+    };
+
+    // FIXME(#43081): Avoid this pretty-print + reparse hack
+    let source = pprust::nonterminal_to_string(nt);
+    let filename = FileName::macro_expansion_source_code(&source);
+    let tokens_for_real = parse_stream_from_source_str(filename, source, sess, Some(span));
+
+    // During early phases of the compiler the AST could get modified
+    // directly (e.g., attributes added or removed) and the internal cache
+    // of tokens may not be invalidated or updated. Consequently if the
+    // "lossless" token stream disagrees with our actual stringification
+    // (which has historically been much more battle-tested) then we go
+    // with the lossy stream anyway (losing span information).
+    //
+    // Note that the comparison isn't `==` here to avoid comparing spans,
+    // but it *also* is a "probable" equality which is a pretty weird
+    // definition. We mostly want to catch actual changes to the AST
+    // like a `#[cfg]` being processed or some weird `macro_rules!`
+    // expansion.
+    //
+    // What we *don't* want to catch is the fact that a user-defined
+    // literal like `0xf` is stringified as `15`, causing the cached token
+    // stream to not be literal `==` token-wise (ignoring spans) to the
+    // token stream we got from stringification.
+    //
+    // Instead the "probably equal" check here is "does each token
+    // recursively have the same discriminant?" We basically don't look at
+    // the token values here and assume that such fine grained token stream
+    // modifications, including adding/removing typically non-semantic
+    // tokens such as extra braces and commas, don't happen.
+    if let Some(tokens) = tokens {
+        if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
+            return tokens
+        }
+        info!("cached tokens found, but they're not \"probably equal\", \
+                going with stringified version");
+    }
+    return tokens_for_real
 }
 
-impl SeqSep {
-    pub fn trailing_allowed(t: TokenKind) -> SeqSep {
-        SeqSep {
-            sep: Some(t),
-            trailing_sep_allowed: true,
-        }
+fn prepend_attrs(
+    sess: &ParseSess,
+    attrs: &[ast::Attribute],
+    tokens: Option<&tokenstream::TokenStream>,
+    span: syntax_pos::Span
+) -> Option<tokenstream::TokenStream> {
+    let tokens = tokens?;
+    if attrs.len() == 0 {
+        return Some(tokens.clone())
     }
+    let mut builder = tokenstream::TokenStreamBuilder::new();
+    for attr in attrs {
+        assert_eq!(attr.style, ast::AttrStyle::Outer,
+                   "inner attributes should prevent cached tokens from existing");
+
+        let source = pprust::attribute_to_string(attr);
+        let macro_filename = FileName::macro_expansion_source_code(&source);
+        if attr.is_sugared_doc {
+            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
+            builder.push(stream);
+            continue
+        }
 
-    pub fn none() -> SeqSep {
-        SeqSep {
-            sep: None,
-            trailing_sep_allowed: false,
+        // synthesize # [ $path $tokens ] manually here
+        let mut brackets = tokenstream::TokenStreamBuilder::new();
+
+        // For simple paths, push the identifier directly
+        if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
+            let ident = attr.path.segments[0].ident;
+            let token = token::Ident(ident.name, ident.as_str().starts_with("r#"));
+            brackets.push(tokenstream::TokenTree::token(token, ident.span));
+
+        // ... and for more complicated paths, fall back to a reparse hack that
+        // should eventually be removed.
+        } else {
+            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
+            brackets.push(stream);
         }
+
+        brackets.push(attr.tokens.clone());
+
+        // The span we list here for `#` and for `[ ... ]` are both wrong in
+        // that it encompasses more than each token, but it hopefully is "good
+        // enough" for now at least.
+        builder.push(tokenstream::TokenTree::token(token::Pound, attr.span));
+        let delim_span = tokenstream::DelimSpan::from_single(attr.span);
+        builder.push(tokenstream::TokenTree::Delimited(
+            delim_span, token::DelimToken::Bracket, brackets.build().into()));
     }
+    builder.push(tokens.clone());
+    Some(builder.build())
 }
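
nt_to_tokenstream above keeps an AST node's cached token stream only when it is "probably equal" to the pretty-print-and-reparse result, where "probably equal" means the token kinds (discriminants) line up while values and spans are ignored; otherwise it falls back to the reparsed, span-less stream. The following toy model shows that decision with invented types (Tok, probably_equal, choose_stream), not rustc's TokenStream API:

    use std::mem::discriminant;

    #[derive(Debug, Clone, PartialEq)]
    enum Tok {
        Ident(String),
        Literal(String),
        Punct(char),
    }

    // "Probably equal": same length and the same variant at every position,
    // ignoring the payload (token text, spans).
    fn probably_equal(a: &[Tok], b: &[Tok]) -> bool {
        a.len() == b.len()
            && a.iter().zip(b).all(|(x, y)| discriminant(x) == discriminant(y))
    }

    // Prefer the cached (span-preserving) stream when it still matches the
    // stringified AST structurally; otherwise fall back to the reparsed one.
    fn choose_stream(cached: Option<Vec<Tok>>, reparsed: Vec<Tok>) -> Vec<Tok> {
        match cached {
            Some(tokens) if probably_equal(&tokens, &reparsed) => tokens,
            _ => reparsed,
        }
    }

    fn main() {
        let cached = vec![Tok::Ident("x".into()), Tok::Punct('='), Tok::Literal("0xf".into())];
        let reparsed = vec![Tok::Ident("x".into()), Tok::Punct('='), Tok::Literal("15".into())];
        // Same shape, different literal text: keep the cached, span-preserving stream.
        assert_eq!(choose_stream(Some(cached), reparsed)[2], Tok::Literal("0xf".into()));

        let cached = vec![Tok::Ident("x".into())];
        let reparsed = vec![Tok::Ident("x".into()), Tok::Punct(';')];
        // The shapes differ (e.g. after a `#[cfg]` edit): use the reparsed stream.
        assert_eq!(choose_stream(Some(cached), reparsed).len(), 2);
    }
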
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index d4a6e9f6c6b..6bbd8be0cb9 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -1,28 +1,28 @@
+pub mod attr;
 mod expr;
 mod pat;
 mod item;
-pub use item::AliasKind;
 mod module;
-pub use module::{ModulePath, ModulePathSuccess};
 mod ty;
 mod path;
 pub use path::PathStyle;
 mod stmt;
 mod generics;
+mod diagnostics;
+use diagnostics::Error;
 
 use crate::ast::{
-    self, DUMMY_NODE_ID, AttrStyle, Attribute, BindingMode, CrateSugar, FnDecl, Ident,
-    IsAsync, MacDelimiter, Mutability, Param, StrStyle, SelfKind, TyKind, Visibility,
-    VisibilityKind, Unsafety,
+    self, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Ident,
+    IsAsync, MacDelimiter, Mutability, StrStyle, Visibility, VisibilityKind, Unsafety,
 };
-use crate::parse::{ParseSess, PResult, Directory, DirectoryOwnership, SeqSep, literal, token};
-use crate::parse::diagnostics::{Error, dummy_arg};
+use crate::parse::{PResult, Directory, DirectoryOwnership};
 use crate::parse::lexer::UnmatchedBrace;
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use crate::parse::token::{Token, TokenKind, DelimToken};
+use crate::parse::token::{self, Token, TokenKind, DelimToken};
 use crate::print::pprust;
 use crate::ptr::P;
-use crate::source_map::{self, respan};
+use crate::sess::ParseSess;
+use crate::source_map::respan;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
 use crate::ThinVec;
@@ -44,14 +44,14 @@ bitflags::bitflags! {
 }
 
 #[derive(Clone, Copy, PartialEq, Debug)]
-crate enum SemiColonMode {
+enum SemiColonMode {
     Break,
     Ignore,
     Comma,
 }
 
 #[derive(Clone, Copy, PartialEq, Debug)]
-crate enum BlockMode {
+enum BlockMode {
     Break,
     Ignore,
 }
@@ -124,33 +124,33 @@ pub struct Parser<'a> {
     prev_token_kind: PrevTokenKind,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.
-    crate directory: Directory<'a>,
+    pub(super) directory: Directory<'a>,
     /// `true` to parse sub-modules in other files.
-    pub recurse_into_file_modules: bool,
+    pub(super) recurse_into_file_modules: bool,
     /// Name of the root module this parser originated from. If `None`, then the
     /// name is not known. This does not change while the parser is descending
     /// into modules, and sub-parsers have new values for this name.
     pub root_module_name: Option<String>,
-    crate expected_tokens: Vec<TokenType>,
+    expected_tokens: Vec<TokenType>,
     token_cursor: TokenCursor,
     desugar_doc_comments: bool,
     /// `true` we should configure out of line modules as we parse.
-    pub cfg_mods: bool,
+    cfg_mods: bool,
     /// This field is used to keep track of how many left angle brackets we have seen. This is
     /// required in order to detect extra leading left angle brackets (`<` characters) and error
     /// appropriately.
     ///
     /// See the comments in the `parse_path_segment` function for more details.
-    crate unmatched_angle_bracket_count: u32,
-    crate max_angle_bracket_count: u32,
+    unmatched_angle_bracket_count: u32,
+    max_angle_bracket_count: u32,
     /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
     /// it gets removed from here. Every entry left at the end gets emitted as an independent
     /// error.
-    crate unclosed_delims: Vec<UnmatchedBrace>,
-    crate last_unexpected_token_span: Option<Span>,
-    crate last_type_ascription: Option<(Span, bool /* likely path typo */)>,
+    pub(super) unclosed_delims: Vec<UnmatchedBrace>,
+    last_unexpected_token_span: Option<Span>,
+    pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
     /// If present, this `Parser` is not parsing Rust code but rather a macro call.
-    crate subparser_name: Option<&'static str>,
+    subparser_name: Option<&'static str>,
 }
 
 impl<'a> Drop for Parser<'a> {
@@ -194,7 +194,7 @@ struct TokenCursorFrame {
 /// You can find some more example usage of this in the `collect_tokens` method
 /// on the parser.
 #[derive(Clone)]
-crate enum LastToken {
+enum LastToken {
     Collecting(Vec<TreeAndJoint>),
     Was(Option<TreeAndJoint>),
 }
@@ -285,10 +285,10 @@ impl TokenCursor {
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                 [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
-                    .iter().cloned().collect::<TokenStream>().into()
+                    .iter().cloned().collect::<TokenStream>()
             } else {
                 [TokenTree::token(token::Pound, sp), body]
-                    .iter().cloned().collect::<TokenStream>().into()
+                    .iter().cloned().collect::<TokenStream>()
             },
         )));
 
@@ -297,7 +297,7 @@ impl TokenCursor {
 }
 
 #[derive(Clone, PartialEq)]
-crate enum TokenType {
+enum TokenType {
     Token(TokenKind),
     Keyword(Symbol),
     Operator,
@@ -309,7 +309,7 @@ crate enum TokenType {
 }
 
 impl TokenType {
-    crate fn to_string(&self) -> String {
+    fn to_string(&self) -> String {
         match *self {
             TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
             TokenType::Keyword(kw) => format!("`{}`", kw),
@@ -324,11 +324,35 @@ impl TokenType {
 }
 
 #[derive(Copy, Clone, Debug)]
-crate enum TokenExpectType {
+enum TokenExpectType {
     Expect,
     NoExpect,
 }
 
+/// A sequence separator.
+struct SeqSep {
+    /// The separator token.
+    sep: Option<TokenKind>,
+    /// `true` if a trailing separator is allowed.
+    trailing_sep_allowed: bool,
+}
+
+impl SeqSep {
+    fn trailing_allowed(t: TokenKind) -> SeqSep {
+        SeqSep {
+            sep: Some(t),
+            trailing_sep_allowed: true,
+        }
+    }
+
+    fn none() -> SeqSep {
+        SeqSep {
+            sep: None,
+            trailing_sep_allowed: false,
+        }
+    }
+}
+
 impl<'a> Parser<'a> {
     pub fn new(
         sess: &'a ParseSess,
@@ -405,7 +429,7 @@ impl<'a> Parser<'a> {
         pprust::token_to_string(&self.token)
     }
 
-    crate fn token_descr(&self) -> Option<&'static str> {
+    fn token_descr(&self) -> Option<&'static str> {
         Some(match &self.token.kind {
             _ if self.token.is_special_ident() => "reserved identifier",
             _ if self.token.is_used_keyword() => "keyword",
@@ -415,7 +439,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    crate fn this_token_descr(&self) -> String {
+    pub(super) fn this_token_descr(&self) -> String {
         if let Some(prefix) = self.token_descr() {
             format!("{} `{}`", prefix, self.this_token_to_string())
         } else {
@@ -465,7 +489,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
+    fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
         self.parse_ident_common(true)
     }
 
@@ -498,7 +522,7 @@ impl<'a> Parser<'a> {
     ///
     /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
     /// encountered.
-    crate fn check(&mut self, tok: &TokenKind) -> bool {
+    fn check(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.token == *tok;
         if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
         is_present
@@ -520,7 +544,7 @@ impl<'a> Parser<'a> {
 
     /// If the next token is the given keyword, eats it and returns `true`.
     /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
-    pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
+    fn eat_keyword(&mut self, kw: Symbol) -> bool {
         if self.check_keyword(kw) {
             self.bump();
             true
@@ -558,7 +582,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn check_ident(&mut self) -> bool {
+    fn check_ident(&mut self) -> bool {
         self.check_or_expected(self.token.is_ident(), TokenType::Ident)
     }
 
@@ -638,10 +662,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
-        literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
-    }
-
     /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
     /// `<` and continue. If `<-` is seen, replaces it with a single `<`
     /// and continue. If a `<` is not seen, returns false.
@@ -727,7 +747,7 @@ impl<'a> Parser<'a> {
     /// Parses a sequence, including the closing delimiter. The function
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
-    pub fn parse_seq_to_end<T>(
+    fn parse_seq_to_end<T>(
         &mut self,
         ket: &TokenKind,
         sep: SeqSep,
@@ -743,7 +763,7 @@ impl<'a> Parser<'a> {
     /// Parses a sequence, not including the closing delimiter. The function
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
-    pub fn parse_seq_to_before_end<T>(
+    fn parse_seq_to_before_end<T>(
         &mut self,
         ket: &TokenKind,
         sep: SeqSep,
@@ -761,7 +781,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    crate fn parse_seq_to_before_tokens<T>(
+    fn parse_seq_to_before_tokens<T>(
         &mut self,
         kets: &[&TokenKind],
         sep: SeqSep,
@@ -1007,7 +1027,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn process_potential_macro_variable(&mut self) {
+    pub fn process_potential_macro_variable(&mut self) {
         self.token = match self.token.kind {
             token::Dollar if self.token.span.from_expansion() &&
                              self.look_ahead(1, |t| t.is_ident()) => {
@@ -1041,7 +1061,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a single token tree from the input.
-    crate fn parse_token_tree(&mut self) -> TokenTree {
+    pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
             token::OpenDelim(..) => {
                 let frame = mem::replace(&mut self.token_cursor.frame,
@@ -1094,302 +1114,6 @@ impl<'a> Parser<'a> {
         res
     }
 
-    fn parse_fn_params(
-        &mut self,
-        named_params: bool,
-        allow_c_variadic: bool,
-    ) -> PResult<'a, Vec<Param>> {
-        let sp = self.token.span;
-        let do_not_enforce_named_params_for_c_variadic = |token: &token::Token| {
-            match token.kind {
-                token::DotDotDot => false,
-                _ => named_params,
-            }
-        };
-        let mut c_variadic = false;
-        let (params, _) = self.parse_paren_comma_seq(|p| {
-            match p.parse_param_general(
-                false,
-                false,
-                allow_c_variadic,
-                do_not_enforce_named_params_for_c_variadic,
-            ) {
-                Ok(param) => Ok(
-                    if let TyKind::CVarArgs = param.ty.kind {
-                        c_variadic = true;
-                        if p.token != token::CloseDelim(token::Paren) {
-                            p.span_err(
-                                p.token.span,
-                                "`...` must be the last argument of a C-variadic function",
-                            );
-                            // FIXME(eddyb) this should probably still push `CVarArgs`.
-                            // Maybe AST validation/HIR lowering should emit the above error?
-                            None
-                        } else {
-                            Some(param)
-                        }
-                    } else {
-                        Some(param)
-                    }
-                ),
-                Err(mut e) => {
-                    e.emit();
-                    let lo = p.prev_span;
-                    // Skip every token until next possible arg or end.
-                    p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
-                    // Create a placeholder argument for proper arg count (issue #34264).
-                    let span = lo.to(p.prev_span);
-                    Ok(Some(dummy_arg(Ident::new(kw::Invalid, span))))
-                }
-            }
-        })?;
-
-        let params: Vec<_> = params.into_iter().filter_map(|x| x).collect();
-
-        if c_variadic && params.len() <= 1 {
-            self.span_err(
-                sp,
-                "C-variadic function must be declared with at least one named argument",
-            );
-        }
-
-        Ok(params)
-    }
-
-    /// Parses the parameter list and result type of a function that may have a `self` parameter.
-    fn parse_fn_decl_with_self(
-        &mut self,
-        is_name_required: impl Copy + Fn(&token::Token) -> bool,
-    ) -> PResult<'a, P<FnDecl>> {
-        // Parse the arguments, starting out with `self` being allowed...
-        let mut is_self_allowed = true;
-        let (mut inputs, _): (Vec<_>, _) = self.parse_paren_comma_seq(|p| {
-            let res = p.parse_param_general(is_self_allowed, true, false, is_name_required);
-            // ...but now that we've parsed the first argument, `self` is no longer allowed.
-            is_self_allowed = false;
-            res
-        })?;
-
-        // Replace duplicated recovered params with `_` pattern to avoid unecessary errors.
-        self.deduplicate_recovered_params_names(&mut inputs);
-
-        Ok(P(FnDecl {
-            inputs,
-            output: self.parse_ret_ty(true)?,
-        }))
-    }
-
-    /// Skips unexpected attributes and doc comments in this position and emits an appropriate
-    /// error.
-    /// This version of parse param doesn't necessarily require identifier names.
-    fn parse_param_general(
-        &mut self,
-        is_self_allowed: bool,
-        is_trait_item: bool,
-        allow_c_variadic: bool,
-        is_name_required: impl Fn(&token::Token) -> bool,
-    ) -> PResult<'a, Param> {
-        let lo = self.token.span;
-        let attrs = self.parse_outer_attributes()?;
-
-        // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
-        if let Some(mut param) = self.parse_self_param()? {
-            param.attrs = attrs.into();
-            return if is_self_allowed {
-                Ok(param)
-            } else {
-                self.recover_bad_self_param(param, is_trait_item)
-            };
-        }
-
-        let is_name_required = is_name_required(&self.token);
-        let (pat, ty) = if is_name_required || self.is_named_param() {
-            debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
-
-            let pat = self.parse_fn_param_pat()?;
-            if let Err(mut err) = self.expect(&token::Colon) {
-                if let Some(ident) = self.parameter_without_type(
-                    &mut err,
-                    pat,
-                    is_name_required,
-                    is_self_allowed,
-                    is_trait_item,
-                ) {
-                    err.emit();
-                    return Ok(dummy_arg(ident));
-                } else {
-                    return Err(err);
-                }
-            }
-
-            self.eat_incorrect_doc_comment_for_param_type();
-            (pat, self.parse_ty_common(true, true, allow_c_variadic)?)
-        } else {
-            debug!("parse_param_general ident_to_pat");
-            let parser_snapshot_before_ty = self.clone();
-            self.eat_incorrect_doc_comment_for_param_type();
-            let mut ty = self.parse_ty_common(true, true, allow_c_variadic);
-            if ty.is_ok() && self.token != token::Comma &&
-               self.token != token::CloseDelim(token::Paren) {
-                // This wasn't actually a type, but a pattern looking like a type,
-                // so we are going to rollback and re-parse for recovery.
-                ty = self.unexpected();
-            }
-            match ty {
-                Ok(ty) => {
-                    let ident = Ident::new(kw::Invalid, self.prev_span);
-                    let bm = BindingMode::ByValue(Mutability::Immutable);
-                    let pat = self.mk_pat_ident(ty.span, bm, ident);
-                    (pat, ty)
-                }
-                // If this is a C-variadic argument and we hit an error, return the error.
-                Err(err) if self.token == token::DotDotDot => return Err(err),
-                // Recover from attempting to parse the argument as a type without pattern.
-                Err(mut err) => {
-                    err.cancel();
-                    mem::replace(self, parser_snapshot_before_ty);
-                    self.recover_arg_parse()?
-                }
-            }
-        };
-
-        let span = lo.to(self.token.span);
-
-        Ok(Param {
-            attrs: attrs.into(),
-            id: ast::DUMMY_NODE_ID,
-            is_placeholder: false,
-            pat,
-            span,
-            ty,
-        })
-    }
-
-    /// Returns the parsed optional self parameter and whether a self shortcut was used.
-    ///
-    /// See `parse_self_param_with_attrs` to collect attributes.
-    fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
-        // Extract an identifier *after* having confirmed that the token is one.
-        let expect_self_ident = |this: &mut Self| {
-            match this.token.kind {
-                // Preserve hygienic context.
-                token::Ident(name, _) => {
-                    let span = this.token.span;
-                    this.bump();
-                    Ident::new(name, span)
-                }
-                _ => unreachable!(),
-            }
-        };
-        // Is `self` `n` tokens ahead?
-        let is_isolated_self = |this: &Self, n| {
-            this.is_keyword_ahead(n, &[kw::SelfLower])
-            && this.look_ahead(n + 1, |t| t != &token::ModSep)
-        };
-        // Is `mut self` `n` tokens ahead?
-        let is_isolated_mut_self = |this: &Self, n| {
-            this.is_keyword_ahead(n, &[kw::Mut])
-            && is_isolated_self(this, n + 1)
-        };
-        // Parse `self` or `self: TYPE`. We already know the current token is `self`.
-        let parse_self_possibly_typed = |this: &mut Self, m| {
-            let eself_ident = expect_self_ident(this);
-            let eself_hi = this.prev_span;
-            let eself = if this.eat(&token::Colon) {
-                SelfKind::Explicit(this.parse_ty()?, m)
-            } else {
-                SelfKind::Value(m)
-            };
-            Ok((eself, eself_ident, eself_hi))
-        };
-        // Recover for the grammar `*self`, `*const self`, and `*mut self`.
-        let recover_self_ptr = |this: &mut Self| {
-            let msg = "cannot pass `self` by raw pointer";
-            let span = this.token.span;
-            this.struct_span_err(span, msg)
-                .span_label(span, msg)
-                .emit();
-
-            Ok((SelfKind::Value(Mutability::Immutable), expect_self_ident(this), this.prev_span))
-        };
-
-        // Parse optional `self` parameter of a method.
-        // Only a limited set of initial token sequences is considered `self` parameters; anything
-        // else is parsed as a normal function parameter list, so some lookahead is required.
-        let eself_lo = self.token.span;
-        let (eself, eself_ident, eself_hi) = match self.token.kind {
-            token::BinOp(token::And) => {
-                let eself = if is_isolated_self(self, 1) {
-                    // `&self`
-                    self.bump();
-                    SelfKind::Region(None, Mutability::Immutable)
-                } else if is_isolated_mut_self(self, 1) {
-                    // `&mut self`
-                    self.bump();
-                    self.bump();
-                    SelfKind::Region(None, Mutability::Mutable)
-                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) {
-                    // `&'lt self`
-                    self.bump();
-                    let lt = self.expect_lifetime();
-                    SelfKind::Region(Some(lt), Mutability::Immutable)
-                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) {
-                    // `&'lt mut self`
-                    self.bump();
-                    let lt = self.expect_lifetime();
-                    self.bump();
-                    SelfKind::Region(Some(lt), Mutability::Mutable)
-                } else {
-                    // `&not_self`
-                    return Ok(None);
-                };
-                (eself, expect_self_ident(self), self.prev_span)
-            }
-            // `*self`
-            token::BinOp(token::Star) if is_isolated_self(self, 1) => {
-                self.bump();
-                recover_self_ptr(self)?
-            }
-            // `*mut self` and `*const self`
-            token::BinOp(token::Star) if
-                self.look_ahead(1, |t| t.is_mutability())
-                && is_isolated_self(self, 2) =>
-            {
-                self.bump();
-                self.bump();
-                recover_self_ptr(self)?
-            }
-            // `self` and `self: TYPE`
-            token::Ident(..) if is_isolated_self(self, 0) => {
-                parse_self_possibly_typed(self, Mutability::Immutable)?
-            }
-            // `mut self` and `mut self: TYPE`
-            token::Ident(..) if is_isolated_mut_self(self, 0) => {
-                self.bump();
-                parse_self_possibly_typed(self, Mutability::Mutable)?
-            }
-            _ => return Ok(None),
-        };
-
-        let eself = source_map::respan(eself_lo.to(eself_hi), eself);
-        Ok(Some(Param::from_self(ThinVec::default(), eself, eself_ident)))
-    }
-
-    fn is_named_param(&self) -> bool {
-        let offset = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
-                token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
-                _ => 0,
-            }
-            token::BinOp(token::And) | token::AndAnd => 1,
-            _ if self.token.is_keyword(kw::Mut) => 1,
-            _ => 0,
-        };
-
-        self.look_ahead(offset, |t| t.is_ident()) &&
-        self.look_ahead(offset + 1, |t| t == &token::Colon)
-    }
-
     fn is_crate_vis(&self) -> bool {
         self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
     }
@@ -1474,12 +1198,14 @@ impl<'a> Parser<'a> {
 `pub(super)`: visible only in the current module's parent
 `pub(in path::to::module)`: visible only on the specified path"##;
 
+        let path_str = pprust::path_to_string(&path);
+
         struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
             .help(suggestion)
             .span_suggestion(
                 path.span,
-                &format!("make this visible only to module `{}` with `in`", path),
-                format!("in {}", path),
+                &format!("make this visible only to module `{}` with `in`", path_str),
+                format!("in {}", path_str),
                 Applicability::MachineApplicable,
             )
             .emit();
@@ -1487,6 +1213,15 @@ impl<'a> Parser<'a> {
         Ok(())
     }
 
+    /// Parses `extern` followed by an optional ABI string, or nothing.
+    fn parse_extern_abi(&mut self) -> PResult<'a, Abi> {
+        if self.eat_keyword(kw::Extern) {
+            Ok(self.parse_opt_abi()?.unwrap_or(Abi::C))
+        } else {
+            Ok(Abi::Rust)
+        }
+    }
+
     /// Parses a string as an ABI spec on an extern type or module. Consumes
     /// the `extern` keyword, if one is found.
     fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
@@ -1559,7 +1294,7 @@ impl<'a> Parser<'a> {
             // This can happen due to a bad interaction of two unrelated recovery mechanisms with
             // mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(`
             // (#62881).
-            return Ok((ret?, TokenStream::new(vec![])));
+            return Ok((ret?, TokenStream::default()));
         } else {
             &mut self.token_cursor.stack[prev].last_token
         };
@@ -1574,7 +1309,7 @@ impl<'a> Parser<'a> {
                 // This can happen due to a bad interaction of two unrelated recovery mechanisms
                 // with mismatched delimiters *and* recovery lookahead on the likely typo
                 // `pub ident(` (#62895, different but similar to the case above).
-                return Ok((ret?, TokenStream::new(vec![])));
+                return Ok((ret?, TokenStream::default()));
             }
         };
 
@@ -1612,7 +1347,7 @@ impl<'a> Parser<'a> {
                                    *t == token::BinOp(token::Star))
     }
 
-    pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
+    fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
         let ret = match self.token.kind {
             token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
                 (symbol, ast::StrStyle::Cooked, suffix),
@@ -1652,7 +1387,7 @@ impl<'a> Parser<'a> {
             ],
             Applicability::MaybeIncorrect,
         ).span_suggestion(
-            self.sess.source_map.next_point(self.prev_span),
+            self.sess.source_map().next_point(self.prev_span),
             "add a semicolon",
             ';'.to_string(),
             Applicability::MaybeIncorrect,
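
Among the moves above, SeqSep (a separator token plus a flag saying whether a trailing separator is allowed) becomes private to parser.rs, where it drives the parse_seq_to_* helpers. Below is a standalone sketch of that kind of loop under simplified assumptions; the token and error types are invented for the example:

    #[derive(Debug, PartialEq, Clone, Copy)]
    enum Tok { Ident(char), Comma, CloseParen }

    struct SeqSep {
        sep: Option<Tok>,
        trailing_sep_allowed: bool,
    }

    // Parse items up to (but not including) `ket`, separated by `sep.sep`;
    // a trailing separator right before `ket` is accepted only if allowed.
    fn parse_seq(tokens: &[Tok], ket: Tok, sep: SeqSep) -> Result<Vec<char>, String> {
        let mut out = Vec::new();
        let mut i = 0;
        let mut first = true;
        while tokens.get(i) != Some(&ket) {
            if !first {
                match (sep.sep, tokens.get(i)) {
                    (Some(s), Some(&t)) if t == s => {
                        i += 1;
                        if tokens.get(i) == Some(&ket) {
                            // A trailing separator right before the closing token.
                            if sep.trailing_sep_allowed { break; }
                            return Err("trailing separator not allowed".into());
                        }
                    }
                    (Some(_), _) => return Err(format!("expected separator at token {}", i)),
                    (None, _) => {}
                }
            }
            match tokens.get(i) {
                Some(&Tok::Ident(c)) => { out.push(c); i += 1; first = false; }
                other => return Err(format!("unexpected token {:?} at {}", other, i)),
            }
        }
        Ok(out)
    }

    fn main() {
        use Tok::*;
        let toks = [Ident('a'), Comma, Ident('b'), Comma, CloseParen];

        let trailing_ok = SeqSep { sep: Some(Comma), trailing_sep_allowed: true };
        assert_eq!(parse_seq(&toks, CloseParen, trailing_ok), Ok(vec!['a', 'b']));

        let no_trailing = SeqSep { sep: Some(Comma), trailing_sep_allowed: false };
        assert!(parse_seq(&toks, CloseParen, no_trailing).is_err());
    }
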
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/parser/attr.rs
index e74f3045db8..188a144cac9 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/parser/attr.rs
@@ -1,13 +1,11 @@
+use super::{SeqSep, PResult, Parser, TokenType, PathStyle};
 use crate::attr;
 use crate::ast;
-use crate::parse::{SeqSep, PResult};
 use crate::parse::token::{self, Nonterminal, DelimToken};
-use crate::parse::parser::{Parser, TokenType, PathStyle};
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::source_map::Span;
 
 use log::debug;
-use smallvec::smallvec;
 
 #[derive(Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -20,7 +18,7 @@ const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \
 
 impl<'a> Parser<'a> {
     /// Parses attributes that appear before an item.
-    crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
+    pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
         let mut attrs: Vec<ast::Attribute> = Vec::new();
         let mut just_parsed_doc_comment = false;
         loop {
@@ -84,9 +82,10 @@ impl<'a> Parser<'a> {
 
     /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy`
     /// that prescribes how to handle inner attributes.
-    fn parse_attribute_with_inner_parse_policy(&mut self,
-                                               inner_parse_policy: InnerAttributeParsePolicy<'_>)
-                                               -> PResult<'a, ast::Attribute> {
+    fn parse_attribute_with_inner_parse_policy(
+        &mut self,
+        inner_parse_policy: InnerAttributeParsePolicy<'_>
+    ) -> PResult<'a, ast::Attribute> {
         debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
                inner_parse_policy,
                self.token);
@@ -193,17 +192,17 @@ impl<'a> Parser<'a> {
                         is_interpolated_expr = true;
                     }
                 }
-                let tokens = if is_interpolated_expr {
+                let token_tree = if is_interpolated_expr {
                     // We need to accept arbitrary interpolated expressions to continue
                     // supporting things like `doc = $expr` that work on stable.
                     // Non-literal interpolated expressions are rejected after expansion.
-                    self.parse_token_tree().into()
+                    self.parse_token_tree()
                 } else {
-                    self.parse_unsuffixed_lit()?.tokens()
+                    self.parse_unsuffixed_lit()?.token_tree()
                 };
-                TokenStream::from_streams(smallvec![eq.into(), tokens])
+                TokenStream::new(vec![eq.into(), token_tree.into()])
             } else {
-                TokenStream::empty()
+                TokenStream::default()
             };
             ast::AttrItem { path, tokens }
         })
@@ -260,6 +259,27 @@ impl<'a> Parser<'a> {
         Ok(lit)
     }
 
+    /// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited.
+    crate fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> {
+        self.expect(&token::OpenDelim(token::Paren))?;
+
+        let cfg_predicate = self.parse_meta_item()?;
+        self.expect(&token::Comma)?;
+
+        // Presumably, the majority of the time there will only be one attr.
+        let mut expanded_attrs = Vec::with_capacity(1);
+
+        while !self.check(&token::CloseDelim(token::Paren)) {
+            let lo = self.token.span.lo();
+            let item = self.parse_attr_item()?;
+            expanded_attrs.push((item, self.prev_span.with_lo(lo)));
+            self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?;
+        }
+
+        self.expect(&token::CloseDelim(token::Paren))?;
+        Ok((cfg_predicate, expanded_attrs))
+    }
+
     /// Matches the following grammar (per RFC 1559).
     ///
     ///     meta_item : PATH ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
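As a rough illustration (not part of this change; the crate feature name is made up), the new `parse_cfg_attr` is entered once the parser sits inside the `cfg_attr(...)` delimiters and expects a `meta_item` predicate, a comma, then a comma-separated list of attribute items:

    #[cfg_attr(all(unix, feature = "extra"), allow(dead_code), inline)]
    fn frobnicate() {}

Here `cfg_predicate` would be `all(unix, feature = "extra")` and `expanded_attrs` would hold `allow(dead_code)` and `inline`, each paired with its span.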
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/parser/diagnostics.rs
index 4ad0bd06d99..06982c789db 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/parser/diagnostics.rs
@@ -1,10 +1,11 @@
+use super::{
+    BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType,
+    SeqSep, PResult, Parser
+};
 use crate::ast::{
     self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
-    Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
+    Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind,
 };
-use crate::feature_gate::{feature_err, UnstableFeatures};
-use crate::parse::{SeqSep, PResult, Parser, ParseSess};
-use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
 use crate::parse::token::{self, TokenKind};
 use crate::print::pprust;
 use crate::ptr::P;
@@ -17,8 +18,10 @@ use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError};
 use log::{debug, trace};
 use std::mem;
 
+const TURBOFISH: &'static str = "use `::<...>` instead of `<...>` to specify type arguments";
+
 /// Creates a placeholder argument.
-crate fn dummy_arg(ident: Ident) -> Param {
+pub(super) fn dummy_arg(ident: Ident) -> Param {
     let pat = P(Pat {
         id: ast::DUMMY_NODE_ID,
         kind: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
@@ -121,7 +124,7 @@ impl Error {
     }
 }
 
-pub trait RecoverQPath: Sized + 'static {
+pub(super) trait RecoverQPath: Sized + 'static {
     const PATH_STYLE: PathStyle = PathStyle::Expr;
     fn to_ty(&self) -> Option<P<Ty>>;
     fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
@@ -173,39 +176,43 @@ impl<'a> Parser<'a> {
         self.span_fatal(self.token.span, m)
     }
 
-    pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
+    crate fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
         self.sess.span_diagnostic.struct_span_fatal(sp, m)
     }
 
-    pub fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> {
+    pub(super) fn span_fatal_err<S: Into<MultiSpan>>(
+        &self,
+        sp: S,
+        err: Error,
+    ) -> DiagnosticBuilder<'a> {
         err.span_err(sp, self.diagnostic())
     }
 
-    pub fn bug(&self, m: &str) -> ! {
+    pub(super) fn bug(&self, m: &str) -> ! {
         self.sess.span_diagnostic.span_bug(self.token.span, m)
     }
 
-    pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
+    pub(super) fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
         self.sess.span_diagnostic.span_err(sp, m)
     }
 
-    crate fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
+    pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
         self.sess.span_diagnostic.struct_span_err(sp, m)
     }
 
-    crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
+    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
         self.sess.span_diagnostic.span_bug(sp, m)
     }
 
-    crate fn diagnostic(&self) -> &'a errors::Handler {
+    pub(super) fn diagnostic(&self) -> &'a errors::Handler {
         &self.sess.span_diagnostic
     }
 
-    crate fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
+    pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
         self.sess.source_map().span_to_snippet(span)
     }
 
-    crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
+    pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
         let mut err = self.struct_span_err(
             self.token.span,
             &format!("expected identifier, found {}", self.this_token_descr()),
@@ -236,7 +243,7 @@ impl<'a> Parser<'a> {
         err
     }
 
-    pub fn expected_one_of_not_found(
+    pub(super) fn expected_one_of_not_found(
         &mut self,
         edible: &[TokenKind],
         inedible: &[TokenKind],
@@ -386,14 +393,17 @@ impl<'a> Parser<'a> {
             let next_pos = sm.lookup_char_pos(self.token.span.lo());
             let op_pos = sm.lookup_char_pos(sp.hi());
 
+            let allow_unstable = self.sess.unstable_features.is_nightly_build();
+
             if likely_path {
                 err.span_suggestion(
                     sp,
                     "maybe write a path separator here",
                     "::".to_string(),
-                    match self.sess.unstable_features {
-                        UnstableFeatures::Disallow => Applicability::MachineApplicable,
-                        _ => Applicability::MaybeIncorrect,
+                    if allow_unstable {
+                        Applicability::MaybeIncorrect
+                    } else {
+                        Applicability::MachineApplicable
                     },
                 );
             } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
@@ -403,14 +413,13 @@ impl<'a> Parser<'a> {
                     ";".to_string(),
                     Applicability::MaybeIncorrect,
                 );
-            } else if let UnstableFeatures::Disallow = self.sess.unstable_features {
-                err.span_label(sp, "tried to parse a type due to this");
-            } else {
+            } else if allow_unstable {
                 err.span_label(sp, "tried to parse a type due to this type ascription");
+            } else {
+                err.span_label(sp, "tried to parse a type due to this");
             }
-            if let UnstableFeatures::Disallow = self.sess.unstable_features {
+            if allow_unstable {
                 // Give extra information about type ascription only if it's a nightly compiler.
-            } else {
                 err.note("`#![feature(type_ascription)]` lets you annotate an expression with a \
                           type: `<expr>: <type>`");
                 err.note("for more information, see \
@@ -421,7 +430,7 @@ impl<'a> Parser<'a> {
 
     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
+    pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
         if let Err(ref mut err) = self.parse_seq_to_before_tokens(
             kets,
             SeqSep::none(),
@@ -439,7 +448,7 @@ impl<'a> Parser<'a> {
     /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
     ///                                                        ^^ help: remove extra angle brackets
     /// ```
-    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
+    pub(super) fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
         // This function is intended to be invoked after parsing a path segment where there are two
         // cases:
         //
@@ -543,38 +552,157 @@ impl<'a> Parser<'a> {
     }
 
     /// Produces an error if comparison operators are chained (RFC #558).
-    /// We only need to check the LHS, not the RHS, because all comparison ops
-    /// have same precedence and are left-associative.
-    crate fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) -> PResult<'a, ()> {
-        debug_assert!(outer_op.is_comparison(),
-                      "check_no_chained_comparison: {:?} is not comparison",
-                      outer_op);
+    /// We only need to check the LHS, not the RHS, because all comparison ops have the same
+    /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`).
+    ///
+    /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used
+    /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the
+    /// case.
+    ///
+    /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are
+    /// left-associative, we can infer that we have:
+    ///
+    ///           outer_op
+    ///           /   \
+    ///     inner_op   r2
+    ///        /  \
+    ///     l1    r1
+    pub(super) fn check_no_chained_comparison(
+        &mut self,
+        lhs: &Expr,
+        outer_op: &AssocOp,
+    ) -> PResult<'a, Option<P<Expr>>> {
+        debug_assert!(
+            outer_op.is_comparison(),
+            "check_no_chained_comparison: {:?} is not comparison",
+            outer_op,
+        );
+
+        let mk_err_expr = |this: &Self, span| {
+            Ok(Some(this.mk_expr(span, ExprKind::Err, ThinVec::new())))
+        };
+
         match lhs.kind {
             ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
                 // Respan to include both operators.
-                let op_span = op.span.to(self.token.span);
+                let op_span = op.span.to(self.prev_span);
                 let mut err = self.struct_span_err(
                     op_span,
                     "chained comparison operators require parentheses",
                 );
+
+                let suggest = |err: &mut DiagnosticBuilder<'_>| {
+                    err.span_suggestion_verbose(
+                        op_span.shrink_to_lo(),
+                        TURBOFISH,
+                        "::".to_string(),
+                        Applicability::MaybeIncorrect,
+                    );
+                };
+
                 if op.node == BinOpKind::Lt &&
                     *outer_op == AssocOp::Less ||  // Include `<` to provide this recommendation
                     *outer_op == AssocOp::Greater  // even in a case like the following:
                 {                                  //     Foo<Bar<Baz<Qux, ()>>>
-                    err.help(
-                        "use `::<...>` instead of `<...>` if you meant to specify type arguments");
-                    err.help("or use `(...)` if you meant to specify fn arguments");
-                    // These cases cause too many knock-down errors, bail out (#61329).
-                    return Err(err);
+                    if *outer_op == AssocOp::Less {
+                        let snapshot = self.clone();
+                        self.bump();
+                        // So far we have parsed `foo<bar<`, consume the rest of the type args.
+                        let modifiers = [
+                            (token::Lt, 1),
+                            (token::Gt, -1),
+                            (token::BinOp(token::Shr), -2),
+                        ];
+                        self.consume_tts(1, &modifiers[..]);
+
+                        if !&[
+                            token::OpenDelim(token::Paren),
+                            token::ModSep,
+                        ].contains(&self.token.kind) {
+                            // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
+                            // parser and bail out.
+                            mem::replace(self, snapshot.clone());
+                        }
+                    }
+                    return if token::ModSep == self.token.kind {
+                        // We have some certainty that this was a bad turbofish at this point.
+                        // `foo< bar >::`
+                        suggest(&mut err);
+
+                        let snapshot = self.clone();
+                        self.bump(); // `::`
+
+                        // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`.
+                        match self.parse_expr() {
+                            Ok(_) => {
+                                // 99% certain that the suggestion is correct, continue parsing.
+                                err.emit();
+                                // FIXME: actually check that the two expressions in the binop are
+                                // paths and resynthesize new fn call expression instead of using
+                                // `ExprKind::Err` placeholder.
+                                mk_err_expr(self, lhs.span.to(self.prev_span))
+                            }
+                            Err(mut expr_err) => {
+                                expr_err.cancel();
+                                // Not entirely sure now, but we bubble the error up with the
+                                // suggestion.
+                                mem::replace(self, snapshot);
+                                Err(err)
+                            }
+                        }
+                    } else if token::OpenDelim(token::Paren) == self.token.kind {
+                        // We have high certainty that this was a bad turbofish at this point.
+                        // `foo< bar >(`
+                        suggest(&mut err);
+                        // Consume the fn call arguments.
+                        match self.consume_fn_args() {
+                            Err(()) => Err(err),
+                            Ok(()) => {
+                                err.emit();
+                                // FIXME: actually check that the two expressions in the binop are
+                                // paths and resynthesize new fn call expression instead of using
+                                // `ExprKind::Err` placeholder.
+                                mk_err_expr(self, lhs.span.to(self.prev_span))
+                            }
+                        }
+                    } else {
+                        // All we know is that this is `foo < bar >` and *nothing* else. Try to
+                        // be helpful, but don't attempt to recover.
+                        err.help(TURBOFISH);
+                        err.help("or use `(...)` if you meant to specify fn arguments");
+                        // These cases cause too many knock-down errors, bail out (#61329).
+                        Err(err)
+                    };
                 }
                 err.emit();
             }
             _ => {}
         }
-        Ok(())
+        Ok(None)
+    }
+
+    fn consume_fn_args(&mut self) -> Result<(), ()> {
+        let snapshot = self.clone();
+        self.bump(); // `(`
+
+        // Consume the fn call arguments.
+        let modifiers = [
+            (token::OpenDelim(token::Paren), 1),
+            (token::CloseDelim(token::Paren), -1),
+        ];
+        self.consume_tts(1, &modifiers[..]);
+
+        if self.token.kind == token::Eof {
+            // Not entirely sure that what we consumed were fn arguments; roll back.
+            mem::replace(self, snapshot);
+            Err(())
+        } else {
+            // 99% certain that the suggestion is correct, continue parsing.
+            Ok(())
+        }
     }
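A hedged sketch of the inputs the new recovery path targets (illustrative code, not taken from the tests):

    let v = Vec<u8>::new();    // `foo< bar >::` case: turbofish suggestion, parsing continues
    let n = parse<u32>("4");   // `foo< bar >(` case: suggestion, then the fn args are consumed
    let c = a < b > d;         // neither case: only the chained-comparison error and help notes

When the trailing `::...` or `(...)` does parse, the whole expression is replaced by an `ExprKind::Err` placeholder spanning `lhs.span.to(self.prev_span)`, so later stages should not pile knock-on errors onto it.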
 
-    crate fn maybe_report_ambiguous_plus(
+    pub(super) fn maybe_report_ambiguous_plus(
         &mut self,
         allow_plus: bool,
         impl_dyn_multi: bool,
@@ -593,55 +721,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn maybe_report_invalid_custom_discriminants(
-        sess: &ParseSess,
-        variants: &[ast::Variant],
-    ) {
-        let has_fields = variants.iter().any(|variant| match variant.data {
-            VariantData::Tuple(..) | VariantData::Struct(..) => true,
-            VariantData::Unit(..) => false,
-        });
-
-        let discriminant_spans = variants.iter().filter(|variant| match variant.data {
-            VariantData::Tuple(..) | VariantData::Struct(..) => false,
-            VariantData::Unit(..) => true,
-        })
-        .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span))
-        .collect::<Vec<_>>();
-
-        if !discriminant_spans.is_empty() && has_fields {
-            let mut err = feature_err(
-                sess,
-                sym::arbitrary_enum_discriminant,
-                discriminant_spans.clone(),
-                crate::feature_gate::GateIssue::Language,
-                "custom discriminant values are not allowed in enums with tuple or struct variants",
-            );
-            for sp in discriminant_spans {
-                err.span_label(sp, "disallowed custom discriminant");
-            }
-            for variant in variants.iter() {
-                match &variant.data {
-                    VariantData::Struct(..) => {
-                        err.span_label(
-                            variant.span,
-                            "struct variant defined here",
-                        );
-                    }
-                    VariantData::Tuple(..) => {
-                        err.span_label(
-                            variant.span,
-                            "tuple variant defined here",
-                        );
-                    }
-                    VariantData::Unit(..) => {}
-                }
-            }
-            err.emit();
-        }
-    }
-
-    crate fn maybe_recover_from_bad_type_plus(
+    pub(super) fn maybe_recover_from_bad_type_plus(
         &mut self,
         allow_plus: bool,
         ty: &Ty,
@@ -695,7 +775,7 @@ impl<'a> Parser<'a> {
     /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
     /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
     /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
-    crate fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
+    pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
         &mut self,
         base: P<T>,
         allow_recovery: bool,
@@ -711,7 +791,7 @@ impl<'a> Parser<'a> {
 
     /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
     /// combines them into a `<Ty>::AssocItem` expression/pattern/type.
-    crate fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
+    pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
         &mut self,
         ty_span: Span,
         ty: P<Ty>,
@@ -734,7 +814,7 @@ impl<'a> Parser<'a> {
                 // This is a best-effort recovery.
                 path.span,
                 "try",
-                format!("<{}>::{}", ty_str, path),
+                format!("<{}>::{}", ty_str, pprust::path_to_string(&path)),
                 Applicability::MaybeIncorrect,
             )
             .emit();
@@ -750,7 +830,7 @@ impl<'a> Parser<'a> {
         )))
     }
 
-    crate fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
+    pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
         if self.eat(&token::Semi) {
             let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`");
             err.span_suggestion_short(
@@ -786,7 +866,7 @@ impl<'a> Parser<'a> {
 
     /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
     /// closing delimiter.
-    pub fn unexpected_try_recover(
+    pub(super) fn unexpected_try_recover(
         &mut self,
         t: &TokenKind,
     ) -> PResult<'a, bool /* recovered */> {
@@ -836,7 +916,7 @@ impl<'a> Parser<'a> {
         Err(err)
     }
 
-    crate fn parse_semi_or_incorrect_foreign_fn_body(
+    pub(super) fn parse_semi_or_incorrect_foreign_fn_body(
         &mut self,
         ident: &Ident,
         extern_sp: Span,
@@ -874,7 +954,7 @@ impl<'a> Parser<'a> {
 
     /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
     /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
-    crate fn parse_incorrect_await_syntax(
+    pub(super) fn parse_incorrect_await_syntax(
         &mut self,
         lo: Span,
         await_sp: Span,
@@ -926,7 +1006,7 @@ impl<'a> Parser<'a> {
     }
 
     /// If encountering `future.await()`, consumes and emits an error.
-    crate fn recover_from_await_method_call(&mut self) {
+    pub(super) fn recover_from_await_method_call(&mut self) {
         if self.token == token::OpenDelim(token::Paren) &&
             self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
         {
@@ -949,7 +1029,7 @@ impl<'a> Parser<'a> {
     /// and suggest writing `for $pat in $expr` instead.
     ///
     /// This should be called before parsing the `$block`.
-    crate fn recover_parens_around_for_head(
+    pub(super) fn recover_parens_around_for_head(
         &mut self,
         pat: P<Pat>,
         expr: &Expr,
@@ -987,7 +1067,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
+    pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
         self.token.is_ident() &&
             if let ast::ExprKind::Path(..) = node { true } else { false } &&
             !self.token.is_reserved_ident() &&           // v `foo:bar(baz)`
@@ -1001,7 +1081,7 @@ impl<'a> Parser<'a> {
              self.look_ahead(2, |t| t == &token::Lt))  // `foo:bar::<baz>`
     }
 
-    crate fn recover_seq_parse_error(
+    pub(super) fn recover_seq_parse_error(
         &mut self,
         delim: token::DelimToken,
         lo: Span,
@@ -1018,7 +1098,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn recover_closing_delimiter(
+    pub(super) fn recover_closing_delimiter(
         &mut self,
         tokens: &[TokenKind],
         mut err: DiagnosticBuilder<'a>,
@@ -1069,7 +1149,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Recovers from `pub` keyword in places where it seems _reasonable_ but isn't valid.
-    crate fn eat_bad_pub(&mut self) {
+    pub(super) fn eat_bad_pub(&mut self) {
         if self.token.is_keyword(kw::Pub) {
             match self.parse_visibility(false) {
                 Ok(vis) => {
@@ -1087,7 +1167,7 @@ impl<'a> Parser<'a> {
     /// statement. This is something of a best-effort heuristic.
     ///
     /// We terminate when we find an unmatched `}` (without consuming it).
-    crate fn recover_stmt(&mut self) {
+    pub(super) fn recover_stmt(&mut self) {
         self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
     }
 
@@ -1098,7 +1178,11 @@ impl<'a> Parser<'a> {
     ///
     /// If `break_on_block` is `Break`, then we will stop consuming tokens
     /// after finding (and consuming) a brace-delimited block.
-    crate fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) {
+    pub(super) fn recover_stmt_(
+        &mut self,
+        break_on_semi: SemiColonMode,
+        break_on_block: BlockMode,
+    ) {
         let mut brace_depth = 0;
         let mut bracket_depth = 0;
         let mut in_block = false;
@@ -1166,7 +1250,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn check_for_for_in_in_typo(&mut self, in_span: Span) {
+    pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
         if self.eat_keyword(kw::In) {
             // a common typo: `for _ in in bar {}`
             self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`")
@@ -1180,14 +1264,14 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> {
+    pub(super) fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> {
         let token_str = self.this_token_descr();
         let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str));
         err.span_label(self.token.span, "expected `;` or `{`");
         Err(err)
     }
 
-    crate fn eat_incorrect_doc_comment_for_param_type(&mut self) {
+    pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
         if let token::DocComment(_) = self.token.kind {
             self.struct_span_err(
                 self.token.span,
@@ -1215,7 +1299,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn parameter_without_type(
+    pub(super) fn parameter_without_type(
         &mut self,
         err: &mut DiagnosticBuilder<'_>,
         pat: P<ast::Pat>,
@@ -1278,7 +1362,7 @@ impl<'a> Parser<'a> {
         None
     }
 
-    crate fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
+    pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
         let pat = self.parse_pat(Some("argument name"))?;
         self.expect(&token::Colon)?;
         let ty = self.parse_ty()?;
@@ -1306,7 +1390,7 @@ impl<'a> Parser<'a> {
         Ok((pat, ty))
     }
 
-    crate fn recover_bad_self_param(
+    pub(super) fn recover_bad_self_param(
         &mut self,
         mut param: ast::Param,
         is_trait_item: bool,
@@ -1324,7 +1408,7 @@ impl<'a> Parser<'a> {
         Ok(param)
     }
 
-    crate fn consume_block(&mut self, delim: token::DelimToken) {
+    pub(super) fn consume_block(&mut self, delim: token::DelimToken) {
         let mut brace_depth = 0;
         loop {
             if self.eat(&token::OpenDelim(delim)) {
@@ -1344,7 +1428,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
+    pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
         let (span, msg) = match (&self.token.kind, self.subparser_name) {
             (&token::Eof, Some(origin)) => {
                 let sp = self.sess.source_map().next_point(self.token.span);
@@ -1364,6 +1448,23 @@ impl<'a> Parser<'a> {
         err
     }
 
+    fn consume_tts(
+        &mut self,
+        mut acc: i64, // `i64` because malformed code can have more closing delims than opening.
+        // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`.
+        modifier: &[(token::TokenKind, i64)],
+    ) {
+        while acc > 0 {
+            if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
+                acc += *val;
+            }
+            if self.token.kind == token::Eof {
+                break;
+            }
+            self.bump();
+        }
+    }
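A hedged walk-through of the counting in `consume_tts`, using the turbofish modifiers `[(Lt, 1), (Gt, -1), (Shr, -2)]` and `acc = 1` with the parser positioned at `Vec` in the made-up remainder `Vec<u8>>(`:

    Vec  -> acc = 1      <  -> acc = 2      u8  -> acc = 2      >>  -> acc = 0

Each token is bumped after the adjustment and before the `acc > 0` check runs again, so the loop stops with the parser on the token after the closing `>>`, here the `(`. The `Eof` check keeps malformed input from looping forever.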
+
     /// Replace duplicated recovered parameters with `_` pattern to avoid unnecessary errors.
     ///
     /// This is necessary because at this point we don't know whether we parsed a function with
@@ -1372,7 +1473,7 @@ impl<'a> Parser<'a> {
     /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
     /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
     /// we deduplicate them to not complain about duplicated parameter names.
-    crate fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
+    pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
         let mut seen_inputs = FxHashSet::default();
         for input in fn_inputs.iter_mut() {
             let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) = (
diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs
index 23674ad589d..67a530ec683 100644
--- a/src/libsyntax/parse/parser/expr.rs
+++ b/src/libsyntax/parse/parser/expr.rs
@@ -1,18 +1,18 @@
-use super::{
-    Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode, SemiColonMode,
-    SeqSep, TokenExpectType,
-};
+use super::{Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode};
+use super::{SemiColonMode, SeqSep, TokenExpectType};
 use super::pat::{GateOr, PARAM_EXPECTED};
+use super::diagnostics::Error;
+
+use crate::parse::literal::LitError;
 
 use crate::ast::{
     self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode,
     Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind,
-    FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field,
+    FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, Lit,
 };
 use crate::maybe_recover_from_interpolated_ty_qpath;
 use crate::parse::classify;
-use crate::parse::token::{self, Token};
-use crate::parse::diagnostics::Error;
+use crate::parse::token::{self, Token, TokenKind};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::{self, Span};
@@ -20,6 +20,7 @@ use crate::symbol::{kw, sym};
 use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};
 
 use errors::Applicability;
+use syntax_pos::Symbol;
 use std::mem;
 use rustc_data_structures::thin_vec::ThinVec;
 
@@ -238,7 +239,9 @@ impl<'a> Parser<'a> {
 
             self.bump();
             if op.is_comparison() {
-                self.check_no_chained_comparison(&lhs, &op)?;
+                if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
+                    return Ok(expr);
+                }
             }
             // Special cases:
             if op == AssocOp::As {
@@ -420,7 +423,7 @@ impl<'a> Parser<'a> {
                 self.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator")
                     .span_suggestion_short(
                         span_of_tilde,
-                        "use `!` to perform bitwise negation",
+                        "use `!` to perform bitwise not",
                         "!".to_owned(),
                         Applicability::MachineApplicable
                     )
@@ -550,8 +553,11 @@ impl<'a> Parser<'a> {
 
                         // Report non-fatal diagnostics, keep `x as usize` as an expression
                         // in AST and continue parsing.
-                        let msg = format!("`<` is interpreted as a start of generic \
-                                           arguments for `{}`, not a {}", path, op_noun);
+                        let msg = format!(
+                            "`<` is interpreted as a start of generic arguments for `{}`, not a {}",
+                            pprust::path_to_string(&path),
+                            op_noun,
+                        );
                         let span_after_type = parser_snapshot_after_type.token.span;
                         let expr = mk_expr(self, P(Ty {
                             span: path.span,
@@ -1067,8 +1073,167 @@ impl<'a> Parser<'a> {
         self.maybe_recover_from_bad_qpath(expr, true)
     }
 
+    /// Matches `lit = true | false | token_lit`.
+    pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
+        let mut recovered = None;
+        if self.token == token::Dot {
+            // Attempt to recover `.4` as `0.4`.
+            recovered = self.look_ahead(1, |next_token| {
+                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
+                        = next_token.kind {
+                    if self.token.span.hi() == next_token.span.lo() {
+                        let s = String::from("0.") + &symbol.as_str();
+                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
+                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
+                    }
+                }
+                None
+            });
+            if let Some(token) = &recovered {
+                self.bump();
+                self.struct_span_err(token.span, "float literals must have an integer part")
+                    .span_suggestion(
+                        token.span,
+                        "must have an integer part",
+                        pprust::token_to_string(token),
+                        Applicability::MachineApplicable,
+                    )
+                    .emit();
+            }
+        }
+
+        let token = recovered.as_ref().unwrap_or(&self.token);
+        match Lit::from_token(token) {
+            Ok(lit) => {
+                self.bump();
+                Ok(lit)
+            }
+            Err(LitError::NotLiteral) => {
+                let msg = format!("unexpected token: {}", self.this_token_descr());
+                Err(self.span_fatal(token.span, &msg))
+            }
+            Err(err) => {
+                let (lit, span) = (token.expect_lit(), token.span);
+                self.bump();
+                self.error_literal_from_token(err, lit, span);
+                // Pack possible quotes and prefixes from the original literal into
+                // the error literal's symbol so they can be pretty-printed faithfully.
+                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
+                let symbol = Symbol::intern(&suffixless_lit.to_string());
+                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
+                Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
+            }
+        }
+    }
+
+    fn error_literal_from_token(&self, err: LitError, lit: token::Lit, span: Span) {
+        // Checks if `s` looks like i32 or u1234 etc.
+        fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
+            s.len() > 1
+            && s.starts_with(first_chars)
+            && s[1..].chars().all(|c| c.is_ascii_digit())
+        }
+
+        let token::Lit { kind, suffix, .. } = lit;
+        match err {
+            // `NotLiteral` is not an error by itself, so we don't report
+            // it and give the parser an opportunity to try something else.
+            LitError::NotLiteral => {}
+            // `LexerError` *is* an error, but it was already reported
+            // by the lexer, so here we don't report it a second time.
+            LitError::LexerError => {}
+            LitError::InvalidSuffix => {
+                self.expect_no_suffix(
+                    span,
+                    &format!("{} {} literal", kind.article(), kind.descr()),
+                    suffix,
+                );
+            }
+            LitError::InvalidIntSuffix => {
+                let suf = suffix.expect("suffix error with no suffix").as_str();
+                if looks_like_width_suffix(&['i', 'u'], &suf) {
+                    // If it looks like a width, try to be helpful.
+                    let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
+                    self.struct_span_err(span, &msg)
+                        .help("valid widths are 8, 16, 32, 64 and 128")
+                        .emit();
+                } else {
+                    let msg = format!("invalid suffix `{}` for integer literal", suf);
+                    self.struct_span_err(span, &msg)
+                        .span_label(span, format!("invalid suffix `{}`", suf))
+                        .help("the suffix must be one of the integral types (`u32`, `isize`, etc)")
+                        .emit();
+                }
+            }
+            LitError::InvalidFloatSuffix => {
+                let suf = suffix.expect("suffix error with no suffix").as_str();
+                if looks_like_width_suffix(&['f'], &suf) {
+                    // If it looks like a width, try to be helpful.
+                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
+                    self.struct_span_err(span, &msg)
+                        .help("valid widths are 32 and 64")
+                        .emit();
+                } else {
+                    let msg = format!("invalid suffix `{}` for float literal", suf);
+                    self.struct_span_err(span, &msg)
+                        .span_label(span, format!("invalid suffix `{}`", suf))
+                        .help("valid suffixes are `f32` and `f64`")
+                        .emit();
+                }
+            }
+            LitError::NonDecimalFloat(base) => {
+                let descr = match base {
+                    16 => "hexadecimal",
+                    8 => "octal",
+                    2 => "binary",
+                    _ => unreachable!(),
+                };
+                self.struct_span_err(span, &format!("{} float literal is not supported", descr))
+                    .span_label(span, "not supported")
+                    .emit();
+            }
+            LitError::IntTooLarge => {
+                self.struct_span_err(span, "integer literal is too large")
+                    .emit();
+            }
+        }
+    }
+
+    pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<Symbol>) {
+        if let Some(suf) = suffix {
+            let mut err = if kind == "a tuple index"
+                && [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf)
+            {
+                // #59553: warn instead of reject out of hand to allow the fix to percolate
+                // through the ecosystem when people fix their macros
+                let mut err = self.sess.span_diagnostic.struct_span_warn(
+                    sp,
+                    &format!("suffixes on {} are invalid", kind),
+                );
+                err.note(&format!(
+                    "`{}` is *temporarily* accepted on tuple index fields as it was \
+                        incorrectly accepted on stable for a few releases",
+                    suf,
+                ));
+                err.help(
+                    "on proc macros, you'll want to use `syn::Index::from` or \
+                        `proc_macro::Literal::*_unsuffixed` for code that will desugar \
+                        to tuple field access",
+                );
+                err.note(
+                    "for more context, see https://github.com/rust-lang/rust/issues/60210",
+                );
+                err
+            } else {
+                self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
+            };
+            err.span_label(sp, format!("invalid suffix `{}`", suf));
+            err.emit();
+        }
+    }
+
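A hedged illustration of the `.4` recovery in `parse_lit` above (diagnostic text paraphrased from the code):

    let x = .4;
    // error: float literals must have an integer part
    // help: must have an integer part: `0.4`

The two tokens are merged only when they are adjacent (`self.token.span.hi() == next_token.span.lo()`), so `. 4` with a space is not rewritten and falls through to the usual "unexpected token" path.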
     /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
-    crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
+    pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
 
         let minus_lo = self.token.span;
@@ -1088,7 +1253,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a block or unsafe block.
-    crate fn parse_block_expr(
+    pub(super) fn parse_block_expr(
         &mut self,
         opt_label: Option<Label>,
         lo: Span,
@@ -1393,7 +1558,7 @@ impl<'a> Parser<'a> {
         return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
     }
 
-    crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
+    pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
         let attrs = self.parse_outer_attributes()?;
         let lo = self.token.span;
         let pat = self.parse_top_pat(GateOr::No)?;
@@ -1501,7 +1666,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses an `async move? {...}` expression.
-    pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+    fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         let span_lo = self.token.span;
         self.expect_keyword(kw::Async)?;
         let capture_clause = self.parse_capture_clause();
@@ -1781,4 +1946,8 @@ impl<'a> Parser<'a> {
     crate fn mk_expr(&self, span: Span, kind: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
         P(Expr { kind, span, attrs, id: DUMMY_NODE_ID })
     }
+
+    pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> {
+        self.mk_expr(span, ExprKind::Err, ThinVec::new())
+    }
 }
diff --git a/src/libsyntax/parse/parser/generics.rs b/src/libsyntax/parse/parser/generics.rs
index 2ecd9cca3c6..bfcb0042a75 100644
--- a/src/libsyntax/parse/parser/generics.rs
+++ b/src/libsyntax/parse/parser/generics.rs
@@ -74,7 +74,7 @@ impl<'a> Parser<'a> {
 
     /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
     /// a trailing comma and erroneous trailing attributes.
-    crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
+    pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
         let mut params = Vec::new();
         loop {
             let attrs = self.parse_outer_attributes()?;
diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs
index c00a5807d52..0acfd1450d8 100644
--- a/src/libsyntax/parse/parser/item.rs
+++ b/src/libsyntax/parse/parser/item.rs
@@ -1,34 +1,29 @@
 use super::{Parser, PResult, PathStyle, SemiColonMode, BlockMode};
+use super::diagnostics::{Error, dummy_arg};
 
 use crate::maybe_whole;
 use crate::ptr::P;
-use crate::ast::{
-    self, DUMMY_NODE_ID, Ident, Attribute, AttrStyle,
-    Item, ItemKind, ImplItem, TraitItem, TraitItemKind,
-    UseTree, UseTreeKind, PathSegment,
-    IsAuto, Constness, IsAsync, Unsafety, Defaultness,
-    Visibility, VisibilityKind, Mutability, FnDecl, FnHeader, MethodSig, Block,
-    ForeignItem, ForeignItemKind,
-    Ty, TyKind, Generics, GenericBounds, TraitRef,
-    EnumDef, VariantData, StructField, AnonConst,
-    Mac, MacDelimiter,
-};
-use crate::ext::base::DummyResult;
+use crate::ast::{self, DUMMY_NODE_ID, Ident, Attribute, AttrStyle, AnonConst, Item, ItemKind};
+use crate::ast::{ImplItem, ImplItemKind, TraitItem, TraitItemKind, UseTree, UseTreeKind};
+use crate::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness};
+use crate::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind};
+use crate::ast::{Ty, TyKind, Generics, GenericBounds, TraitRef, EnumDef, VariantData, StructField};
+use crate::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, MethodSig, SelfKind, Param};
 use crate::parse::token;
 use crate::parse::parser::maybe_append;
-use crate::parse::diagnostics::Error;
 use crate::tokenstream::{TokenTree, TokenStream};
-use crate::source_map::{respan, Span};
 use crate::symbol::{kw, sym};
+use crate::source_map::{self, respan, Span};
+use crate::ThinVec;
 
-use std::mem;
 use log::debug;
+use std::mem;
 use rustc_target::spec::abi::Abi;
 use errors::{Applicability, DiagnosticBuilder, DiagnosticId, StashKey};
 
 /// Whether the type alias or associated type is a concrete type or an opaque type.
 #[derive(Debug)]
-pub enum AliasKind {
+pub(super) enum AliasKind {
     /// Just a new name for the same type.
     Weak(P<Ty>),
     /// Only trait impls of the type will be usable, not the actual type itself.
@@ -98,7 +93,7 @@ impl<'a> Parser<'a> {
 
         let lo = self.token.span;
 
-        let visibility = self.parse_visibility(false)?;
+        let vis = self.parse_visibility(false)?;
 
         if self.eat_keyword(kw::Use) {
             // USE ITEM
@@ -106,15 +101,14 @@ impl<'a> Parser<'a> {
             self.expect(&token::Semi)?;
 
             let span = lo.to(self.prev_span);
-            let item =
-                self.mk_item(span, Ident::invalid(), item_, visibility, attrs);
+            let item = self.mk_item(span, Ident::invalid(), item_, vis, attrs);
             return Ok(Some(item));
         }
 
         if self.eat_keyword(kw::Extern) {
             let extern_sp = self.prev_span;
             if self.eat_keyword(kw::Crate) {
-                return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
+                return Ok(Some(self.parse_item_extern_crate(lo, vis, attrs)?));
             }
 
             let opt_abi = self.parse_opt_abi()?;
@@ -128,10 +122,10 @@ impl<'a> Parser<'a> {
                     constness: respan(fn_span, Constness::NotConst),
                     abi: opt_abi.unwrap_or(Abi::C),
                 };
-                return self.parse_item_fn(lo, visibility, attrs, header);
+                return self.parse_item_fn(lo, vis, attrs, header);
             } else if self.check(&token::OpenDelim(token::Brace)) {
                 return Ok(Some(
-                    self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs, extern_sp)?,
+                    self.parse_item_foreign_mod(lo, opt_abi, vis, attrs, extern_sp)?,
                 ));
             }
 
@@ -142,26 +136,31 @@ impl<'a> Parser<'a> {
             self.bump();
             // STATIC ITEM
             let m = self.parse_mutability();
-            let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_const(Some(m))?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.eat_keyword(kw::Const) {
             let const_span = self.prev_span;
-            if self.check_keyword(kw::Fn)
-                || (self.check_keyword(kw::Unsafe)
-                    && self.is_keyword_ahead(1, &[kw::Fn])) {
+            if [kw::Fn, kw::Unsafe, kw::Extern].iter().any(|k| self.check_keyword(*k)) {
                 // CONST FUNCTION ITEM
                 let unsafety = self.parse_unsafety();
-                self.bump();
+
+                if self.check_keyword(kw::Extern) {
+                    self.sess.gated_spans.const_extern_fn.borrow_mut().push(
+                        lo.to(self.token.span)
+                    );
+                }
+                let abi = self.parse_extern_abi()?;
+                self.bump(); // `fn`
+
                 let header = FnHeader {
                     unsafety,
                     asyncness: respan(const_span, IsAsync::NotAsync),
                     constness: respan(const_span, Constness::Const),
-                    abi: Abi::Rust,
+                    abi,
                 };
-                return self.parse_item_fn(lo, visibility, attrs, header);
+                return self.parse_item_fn(lo, vis, attrs, header);
             }
 
             // CONST ITEM
@@ -177,10 +176,9 @@ impl<'a> Parser<'a> {
                     )
                     .emit();
             }
-            let (ident, item_, extra_attrs) = self.parse_item_const(None)?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+
+            let info = self.parse_item_const(None)?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
 
         // Parses `async unsafe? fn`.
@@ -205,40 +203,33 @@ impl<'a> Parser<'a> {
                     constness: respan(fn_span, Constness::NotConst),
                     abi: Abi::Rust,
                 };
-                return self.parse_item_fn(lo, visibility, attrs, header);
+                return self.parse_item_fn(lo, vis, attrs, header);
             }
         }
+
         if self.check_keyword(kw::Unsafe) &&
             self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
         {
             // UNSAFE TRAIT ITEM
             self.bump(); // `unsafe`
-            let is_auto = if self.eat_keyword(kw::Trait) {
-                IsAuto::No
-            } else {
-                self.expect_keyword(kw::Auto)?;
-                self.expect_keyword(kw::Trait)?;
-                IsAuto::Yes
-            };
-            let (ident, item_, extra_attrs) = self.parse_item_trait(is_auto, Unsafety::Unsafe)?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_trait(Unsafety::Unsafe)?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.check_keyword(kw::Impl) ||
            self.check_keyword(kw::Unsafe) &&
                 self.is_keyword_ahead(1, &[kw::Impl]) ||
            self.check_keyword(kw::Default) &&
-                self.is_keyword_ahead(1, &[kw::Impl, kw::Unsafe]) {
+                self.is_keyword_ahead(1, &[kw::Impl, kw::Unsafe])
+        {
             // IMPL ITEM
             let defaultness = self.parse_defaultness();
             let unsafety = self.parse_unsafety();
             self.expect_keyword(kw::Impl)?;
-            let (ident, item_, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_impl(unsafety, defaultness)?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.check_keyword(kw::Fn) {
             // FUNCTION ITEM
             self.bump();
@@ -249,19 +240,17 @@ impl<'a> Parser<'a> {
                 constness: respan(fn_span, Constness::NotConst),
                 abi: Abi::Rust,
             };
-            return self.parse_item_fn(lo, visibility, attrs, header);
+            return self.parse_item_fn(lo, vis, attrs, header);
         }
+
         if self.check_keyword(kw::Unsafe)
-            && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
+            && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace))
+        {
             // UNSAFE FUNCTION ITEM
             self.bump(); // `unsafe`
             // `{` is also expected after `unsafe`; in case of error, include it in the diagnostic.
             self.check(&token::OpenDelim(token::Brace));
-            let abi = if self.eat_keyword(kw::Extern) {
-                self.parse_opt_abi()?.unwrap_or(Abi::C)
-            } else {
-                Abi::Rust
-            };
+            let abi = self.parse_extern_abi()?;
             self.expect_keyword(kw::Fn)?;
             let fn_span = self.prev_span;
             let header = FnHeader {
@@ -270,15 +259,15 @@ impl<'a> Parser<'a> {
                 constness: respan(fn_span, Constness::NotConst),
                 abi,
             };
-            return self.parse_item_fn(lo, visibility, attrs, header);
+            return self.parse_item_fn(lo, vis, attrs, header);
         }
+
         if self.eat_keyword(kw::Mod) {
             // MODULE ITEM
-            let (ident, item_, extra_attrs) = self.parse_item_mod(&attrs[..])?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_mod(&attrs[..])?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if let Some(type_) = self.eat_type() {
             let (ident, alias, generics) = type_?;
             // TYPE ITEM
@@ -287,54 +276,44 @@ impl<'a> Parser<'a> {
                 AliasKind::OpaqueTy(bounds) => ItemKind::OpaqueTy(bounds, generics),
             };
             let span = lo.to(self.prev_span);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            return Ok(Some(self.mk_item(span, ident, item_, vis, attrs)));
         }
+
         if self.eat_keyword(kw::Enum) {
             // ENUM ITEM
-            let (ident, item_, extra_attrs) = self.parse_item_enum()?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_enum()?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.check_keyword(kw::Trait)
             || (self.check_keyword(kw::Auto)
                 && self.is_keyword_ahead(1, &[kw::Trait]))
         {
-            let is_auto = if self.eat_keyword(kw::Trait) {
-                IsAuto::No
-            } else {
-                self.expect_keyword(kw::Auto)?;
-                self.expect_keyword(kw::Trait)?;
-                IsAuto::Yes
-            };
             // TRAIT ITEM
-            let (ident, item_, extra_attrs) = self.parse_item_trait(is_auto, Unsafety::Normal)?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_trait(Unsafety::Normal)?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.eat_keyword(kw::Struct) {
             // STRUCT ITEM
-            let (ident, item_, extra_attrs) = self.parse_item_struct()?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_struct()?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.is_union_item() {
             // UNION ITEM
             self.bump();
-            let (ident, item_, extra_attrs) = self.parse_item_union()?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_union()?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
-        if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility, lo)? {
+
+        if let Some(macro_def) = self.eat_macro_def(&attrs, &vis, lo)? {
             return Ok(Some(macro_def));
         }
 
         // Verify whether we have encountered a struct or method definition where the user forgot to
         // add the `struct` or `fn` keyword after writing `pub`: `pub S {}`
-        if visibility.node.is_pub() &&
+        if vis.node.is_pub() &&
             self.check_ident() &&
             self.look_ahead(1, |t| *t != token::Not)
         {
@@ -425,17 +404,20 @@ impl<'a> Parser<'a> {
                 return Err(err);
             }
         }
-        self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
+        self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, vis)
     }
 
-    fn recover_first_param(&mut self) -> &'static str {
-        match self.parse_outer_attributes()
-            .and_then(|_| self.parse_self_param())
-            .map_err(|mut e| e.cancel())
-        {
-            Ok(Some(_)) => "method",
-            _ => "function",
-        }
+    pub(super) fn mk_item_with_info(
+        &self,
+        attrs: Vec<Attribute>,
+        lo: Span,
+        vis: Visibility,
+        info: ItemInfo,
+    ) -> PResult<'a, Option<P<Item>>> {
+        let (ident, item, extra_attrs) = info;
+        let span = lo.to(self.prev_span);
+        let attrs = maybe_append(attrs, extra_attrs);
+        Ok(Some(self.mk_item(span, ident, item, vis, attrs)))
     }
 
     /// This is the fall-through for parsing items.
@@ -623,7 +605,7 @@ impl<'a> Parser<'a> {
         let ty_second = if self.token == token::DotDot {
             // We need to report this error after `cfg` expansion for compatibility reasons
             self.bump(); // `..`, do not add it to expected tokens
-            Some(DummyResult::raw_ty(self.prev_span, true))
+            Some(self.mk_ty(self.prev_span, TyKind::Err))
         } else if has_for || self.token.can_begin_type() {
             Some(self.parse_ty()?)
         } else {
@@ -710,9 +692,11 @@ impl<'a> Parser<'a> {
         Ok(item)
     }
 
-    fn parse_impl_item_(&mut self,
-                        at_end: &mut bool,
-                        mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> {
+    fn parse_impl_item_(
+        &mut self,
+        at_end: &mut bool,
+        mut attrs: Vec<Attribute>,
+    ) -> PResult<'a, ImplItem> {
         let lo = self.token.span;
         let vis = self.parse_visibility(false)?;
         let defaultness = self.parse_defaultness();
@@ -724,18 +708,12 @@ impl<'a> Parser<'a> {
             };
             (name, kind, generics)
         } else if self.is_const_item() {
-            // This parses the grammar:
-            //     ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
-            self.expect_keyword(kw::Const)?;
-            let name = self.parse_ident()?;
-            self.expect(&token::Colon)?;
-            let typ = self.parse_ty()?;
-            self.expect(&token::Eq)?;
-            let expr = self.parse_expr()?;
-            self.expect(&token::Semi)?;
-            (name, ast::ImplItemKind::Const(typ, expr), Generics::default())
+            self.parse_impl_const()?
+        } else if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(&vis), at_end)? {
+            // FIXME: code copied from `parse_macro_use_or_failure` -- use abstraction!
+            (Ident::invalid(), ast::ImplItemKind::Macro(mac), Generics::default())
         } else {
-            let (name, inner_attrs, generics, kind) = self.parse_impl_method(&vis, at_end)?;
+            let (name, inner_attrs, generics, kind) = self.parse_impl_method(at_end)?;
             attrs.extend(inner_attrs);
             (name, kind, generics)
         };
@@ -782,76 +760,29 @@ impl<'a> Parser<'a> {
             !self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe])
     }
 
-    /// Parses a method or a macro invocation in a trait impl.
-    fn parse_impl_method(
-        &mut self,
-        vis: &Visibility,
-        at_end: &mut bool
-    ) -> PResult<'a, (Ident, Vec<Attribute>, Generics, ast::ImplItemKind)> {
-        // FIXME: code copied from `parse_macro_use_or_failure` -- use abstraction!
-        if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
-            // method macro
-            Ok((Ident::invalid(), vec![], Generics::default(), ast::ImplItemKind::Macro(mac)))
-        } else {
-            let (ident, sig, generics) = self.parse_method_sig(|_| true)?;
-            *at_end = true;
-            let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
-            Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(sig, body)))
-        }
-    }
-
-    /// Parse the "signature", including the identifier, parameters, and generics
-    /// of a method. The body is not parsed as that differs between `trait`s and `impl`s.
-    fn parse_method_sig(
-        &mut self,
-        is_name_required: impl Copy + Fn(&token::Token) -> bool,
-    ) -> PResult<'a, (Ident, MethodSig, Generics)> {
-        let header = self.parse_fn_front_matter()?;
-        let (ident, mut generics) = self.parse_fn_header()?;
-        let decl = self.parse_fn_decl_with_self(is_name_required)?;
-        let sig = MethodSig { header, decl };
-        generics.where_clause = self.parse_where_clause()?;
-        Ok((ident, sig, generics))
+    /// This parses the grammar:
+    ///     ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
+    fn parse_impl_const(&mut self) -> PResult<'a, (Ident, ImplItemKind, Generics)> {
+        self.expect_keyword(kw::Const)?;
+        let name = self.parse_ident()?;
+        self.expect(&token::Colon)?;
+        let typ = self.parse_ty()?;
+        self.expect(&token::Eq)?;
+        let expr = self.parse_expr()?;
+        self.expect(&token::Semi)?;
+        Ok((name, ImplItemKind::Const(typ, expr), Generics::default()))
     }
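// A concrete instance of the `ImplItemConst` grammar documented above, written as
// ordinary source; `Foo` and `MAX` are illustrative names.
struct Foo;

impl Foo {
    const MAX: u32 = 10; // "const" Ident ":" Ty "=" Expr ";"
}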
 
-    /// Parses all the "front matter" for a `fn` declaration, up to
-    /// and including the `fn` keyword:
-    ///
-    /// - `const fn`
-    /// - `unsafe fn`
-    /// - `const unsafe fn`
-    /// - `extern fn`
-    /// - etc.
-    fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> {
-        let is_const_fn = self.eat_keyword(kw::Const);
-        let const_span = self.prev_span;
-        let asyncness = self.parse_asyncness();
-        if let IsAsync::Async { .. } = asyncness {
-            self.ban_async_in_2015(self.prev_span);
-        }
-        let asyncness = respan(self.prev_span, asyncness);
-        let unsafety = self.parse_unsafety();
-        let (constness, unsafety, abi) = if is_const_fn {
-            (respan(const_span, Constness::Const), unsafety, Abi::Rust)
+    /// Parses `auto? trait Foo { ... }` or `trait Foo = Bar;`.
+    fn parse_item_trait(&mut self, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
+        // Parse optional `auto` prefix.
+        let is_auto = if self.eat_keyword(kw::Auto) {
+            IsAuto::Yes
         } else {
-            let abi = if self.eat_keyword(kw::Extern) {
-                self.parse_opt_abi()?.unwrap_or(Abi::C)
-            } else {
-                Abi::Rust
-            };
-            (respan(self.prev_span, Constness::NotConst), unsafety, abi)
+            IsAuto::No
         };
-        if !self.eat_keyword(kw::Fn) {
-            // It is possible for `expect_one_of` to recover given the contents of
-            // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
-            // account for this.
-            if !self.expect_one_of(&[], &[])? { unreachable!() }
-        }
-        Ok(FnHeader { constness, unsafety, asyncness, abi })
-    }
 
-    /// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
-    fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
+        self.expect_keyword(kw::Trait)?;
         let ident = self.parse_ident()?;
         let mut tps = self.parse_generics()?;
 
@@ -936,38 +867,22 @@ impl<'a> Parser<'a> {
         Ok(item)
     }
 
-    fn parse_trait_item_(&mut self,
-                         at_end: &mut bool,
-                         mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
+    fn parse_trait_item_(
+        &mut self,
+        at_end: &mut bool,
+        mut attrs: Vec<Attribute>,
+    ) -> PResult<'a, TraitItem> {
         let lo = self.token.span;
         self.eat_bad_pub();
         let (name, kind, generics) = if self.eat_keyword(kw::Type) {
             self.parse_trait_item_assoc_ty()?
         } else if self.is_const_item() {
-            self.expect_keyword(kw::Const)?;
-            let ident = self.parse_ident()?;
-            self.expect(&token::Colon)?;
-            let ty = self.parse_ty()?;
-            let default = if self.eat(&token::Eq) {
-                let expr = self.parse_expr()?;
-                self.expect(&token::Semi)?;
-                Some(expr)
-            } else {
-                self.expect(&token::Semi)?;
-                None
-            };
-            (ident, TraitItemKind::Const(ty, default), Generics::default())
+            self.parse_trait_item_const()?
         } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
             // trait item macro.
-            (Ident::invalid(), ast::TraitItemKind::Macro(mac), Generics::default())
+            (Ident::invalid(), TraitItemKind::Macro(mac), Generics::default())
         } else {
-            // This is somewhat dubious; We don't want to allow
-            // argument names to be left off if there is a definition...
-            //
-            // We don't allow argument names to be left off in edition 2018.
-            let (ident, sig, generics) = self.parse_method_sig(|t| t.span.rust_2018())?;
-            let body = self.parse_trait_method_body(at_end, &mut attrs)?;
-            (ident, ast::TraitItemKind::Method(sig, body), generics)
+            self.parse_trait_item_method(at_end, &mut attrs)?
         };
 
         Ok(TraitItem {
@@ -981,48 +896,24 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse the "body" of a method in a trait item definition.
-    /// This can either be `;` when there's no body,
-    /// or e.g. a block when the method is a provided one.
-    fn parse_trait_method_body(
-        &mut self,
-        at_end: &mut bool,
-        attrs: &mut Vec<Attribute>,
-    ) -> PResult<'a, Option<P<Block>>> {
-        Ok(match self.token.kind {
-            token::Semi => {
-                debug!("parse_trait_method_body(): parsing required method");
-                self.bump();
-                *at_end = true;
-                None
-            }
-            token::OpenDelim(token::Brace) => {
-                debug!("parse_trait_method_body(): parsing provided method");
-                *at_end = true;
-                let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
-                attrs.extend(inner_attrs.iter().cloned());
-                Some(body)
-            }
-            token::Interpolated(ref nt) => {
-                match **nt {
-                    token::NtBlock(..) => {
-                        *at_end = true;
-                        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
-                        attrs.extend(inner_attrs.iter().cloned());
-                        Some(body)
-                    }
-                    _ => return self.expected_semi_or_open_brace(),
-                }
-            }
-            _ => return self.expected_semi_or_open_brace(),
-        })
+    fn parse_trait_item_const(&mut self) -> PResult<'a, (Ident, TraitItemKind, Generics)> {
+        self.expect_keyword(kw::Const)?;
+        let ident = self.parse_ident()?;
+        self.expect(&token::Colon)?;
+        let ty = self.parse_ty()?;
+        let default = if self.eat(&token::Eq) {
+            Some(self.parse_expr()?)
+        } else {
+            None
+        };
+        self.expect(&token::Semi)?;
+        Ok((ident, TraitItemKind::Const(ty, default), Generics::default()))
     }
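// The two shapes `parse_trait_item_const` accepts, as ordinary trait items; `Limits`
// and the constant names are illustrative.
trait Limits {
    const MIN: u32;      // no `= expr`: `default` is `None`
    const MAX: u32 = 10; // `= expr`:    `default` is `Some(expr)`
}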
 
     /// Parses the following grammar:
     ///
     ///     TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
-    fn parse_trait_item_assoc_ty(&mut self)
-        -> PResult<'a, (Ident, TraitItemKind, Generics)> {
+    fn parse_trait_item_assoc_ty(&mut self) -> PResult<'a, (Ident, TraitItemKind, Generics)> {
         let ident = self.parse_ident()?;
         let mut generics = self.parse_generics()?;
 
@@ -1068,21 +959,13 @@ impl<'a> Parser<'a> {
                 );
             }
 
-            if self.eat(&token::BinOp(token::Star)) {
-                UseTreeKind::Glob
-            } else {
-                UseTreeKind::Nested(self.parse_use_tree_list()?)
-            }
+            self.parse_use_tree_glob_or_nested()?
         } else {
             // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
             prefix = self.parse_path(PathStyle::Mod)?;
 
             if self.eat(&token::ModSep) {
-                if self.eat(&token::BinOp(token::Star)) {
-                    UseTreeKind::Glob
-                } else {
-                    UseTreeKind::Nested(self.parse_use_tree_list()?)
-                }
+                self.parse_use_tree_glob_or_nested()?
             } else {
                 UseTreeKind::Simple(self.parse_rename()?, DUMMY_NODE_ID, DUMMY_NODE_ID)
             }
@@ -1091,6 +974,15 @@ impl<'a> Parser<'a> {
         Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
     }
 
+    /// Parses `*` or `{...}`.
+    fn parse_use_tree_glob_or_nested(&mut self) -> PResult<'a, UseTreeKind> {
+        Ok(if self.eat(&token::BinOp(token::Star)) {
+            UseTreeKind::Glob
+        } else {
+            UseTreeKind::Nested(self.parse_use_tree_list()?)
+        })
+    }
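// The two forms this helper distinguishes after a `::`, written as ordinary `use`
// items; the paths are illustrative.
use std::collections::*;       // `*`     -> UseTreeKind::Glob
use std::fmt::{self, Display}; // `{...}` -> UseTreeKind::Nested(..)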
+
     /// Parses a `UseTreeKind::Nested(list)`.
     ///
     /// ```
@@ -1184,49 +1076,6 @@ impl<'a> Parser<'a> {
         Ok(ident)
     }
 
-    /// Parses an item-position function declaration.
-    fn parse_item_fn(
-        &mut self,
-        lo: Span,
-        vis: Visibility,
-        attrs: Vec<Attribute>,
-        header: FnHeader,
-    ) -> PResult<'a, Option<P<Item>>> {
-        let allow_c_variadic = header.abi == Abi::C && header.unsafety == Unsafety::Unsafe;
-        let (ident, decl, generics) = self.parse_fn_sig(allow_c_variadic)?;
-        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
-        let span = lo.to(self.prev_span);
-        let kind = ItemKind::Fn(decl, header, generics, body);
-        let attrs = maybe_append(attrs, Some(inner_attrs));
-        Ok(Some(self.mk_item(span, ident, kind, vis, attrs)))
-    }
-
-    /// Parse the "signature", including the identifier, parameters, and generics of a function.
-    fn parse_fn_sig(
-        &mut self,
-        allow_c_variadic: bool,
-    ) -> PResult<'a, (Ident, P<FnDecl>, Generics)> {
-        let (ident, mut generics) = self.parse_fn_header()?;
-        let decl = self.parse_fn_decl(allow_c_variadic)?;
-        generics.where_clause = self.parse_where_clause()?;
-        Ok((ident, decl, generics))
-    }
-
-    /// Parses the name and optional generic types of a function header.
-    fn parse_fn_header(&mut self) -> PResult<'a, (Ident, Generics)> {
-        let id = self.parse_ident()?;
-        let generics = self.parse_generics()?;
-        Ok((id, generics))
-    }
-
-    /// Parses the parameter list and result type of a function declaration.
-    fn parse_fn_decl(&mut self, allow_c_variadic: bool) -> PResult<'a, P<FnDecl>> {
-        Ok(P(FnDecl {
-            inputs: self.parse_fn_params(true, allow_c_variadic)?,
-            output: self.parse_ret_ty(true)?,
-        }))
-    }
-
     /// Parses `extern` for foreign ABI modules.
     ///
     /// `extern` is expected to have been
@@ -1267,7 +1116,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a foreign item.
-    crate fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> {
+    pub fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> {
         maybe_whole!(self, NtForeignItem, |ni| ni);
 
         let attrs = self.parse_outer_attributes()?;
@@ -1278,14 +1127,30 @@ impl<'a> Parser<'a> {
         // Treat `const` as `static` for error recovery, but don't add it to expected tokens.
         if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) {
             if self.token.is_keyword(kw::Const) {
-                self.diagnostic()
-                    .struct_span_err(self.token.span, "extern items cannot be `const`")
-                    .span_suggestion(
+                let mut err = self
+                    .struct_span_err(self.token.span, "extern items cannot be `const`");
+
+                // The user wrote `const fn` (or `const unsafe fn`).
+                if self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe]) {
+                    err.emit();
+                    // Consume `const`
+                    self.bump();
+                    // Consume `unsafe` if present, since `extern` blocks
+                    // don't allow it. This will leave behind a plain `fn`.
+                    self.eat_keyword(kw::Unsafe);
+                    // Treat `const fn` as a plain `fn` for error recovery purposes.
+                    // We've already emitted an error, so compilation is guaranteed
+                    // to fail.
+                    return Ok(self.parse_item_foreign_fn(visibility, lo, attrs, extern_sp)?);
+                }
+                err.span_suggestion(
                         self.token.span,
                         "try using a static value",
                         "static".to_owned(),
                         Applicability::MachineApplicable
-                    ).emit();
+                );
+                err.emit();
             }
             self.bump(); // `static` or `const`
             return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
@@ -1322,28 +1187,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parses a function declaration from a foreign module.
-    fn parse_item_foreign_fn(
-        &mut self,
-        vis: ast::Visibility,
-        lo: Span,
-        attrs: Vec<Attribute>,
-        extern_sp: Span,
-    ) -> PResult<'a, ForeignItem> {
-        self.expect_keyword(kw::Fn)?;
-        let (ident, decl, generics) = self.parse_fn_sig(true)?;
-        let span = lo.to(self.token.span);
-        self.parse_semi_or_incorrect_foreign_fn_body(&ident, extern_sp)?;
-        Ok(ast::ForeignItem {
-            ident,
-            attrs,
-            kind: ForeignItemKind::Fn(decl, generics),
-            id: DUMMY_NODE_ID,
-            span,
-            vis,
-        })
-    }
-
     /// Parses a static item from a foreign module.
     /// Assumes that the `static` keyword is already parsed.
     fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
@@ -1884,3 +1727,466 @@ impl<'a> Parser<'a> {
         })
     }
 }
+
+/// The parsing configuration used to parse a parameter list (see `parse_fn_params`).
+pub(super) struct ParamCfg {
+    /// Is `self` allowed as the first parameter?
+    pub is_self_allowed: bool,
+    /// Is `...` allowed as the tail of the parameter list?
+    pub allow_c_variadic: bool,
+    /// `is_name_required` decides, per parameter, whether the parameter
+    /// must have a pattern (i.e. a name) or may be just a type.
+    pub is_name_required: fn(&token::Token) -> bool,
+}
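// A minimal sketch of how this config gets filled in; the values mirror the call in
// `parse_item_fn` below, and `header` stands for that function's `FnHeader` argument.
//
//     let cfg = ParamCfg {
//         is_self_allowed: false,
//         allow_c_variadic: header.abi == Abi::C && header.unsafety == Unsafety::Unsafe,
//         is_name_required: |_| true,
//     };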
+
+/// Parsing of functions and methods.
+impl<'a> Parser<'a> {
+    /// Parses an item-position function declaration.
+    fn parse_item_fn(
+        &mut self,
+        lo: Span,
+        vis: Visibility,
+        attrs: Vec<Attribute>,
+        header: FnHeader,
+    ) -> PResult<'a, Option<P<Item>>> {
+        let (ident, decl, generics) = self.parse_fn_sig(ParamCfg {
+            is_self_allowed: false,
+            allow_c_variadic: header.abi == Abi::C && header.unsafety == Unsafety::Unsafe,
+            is_name_required: |_| true,
+        })?;
+        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+        let kind = ItemKind::Fn(decl, header, generics, body);
+        self.mk_item_with_info(attrs, lo, vis, (ident, kind, Some(inner_attrs)))
+    }
+
+    /// Parses a function declaration from a foreign module.
+    fn parse_item_foreign_fn(
+        &mut self,
+        vis: ast::Visibility,
+        lo: Span,
+        attrs: Vec<Attribute>,
+        extern_sp: Span,
+    ) -> PResult<'a, ForeignItem> {
+        self.expect_keyword(kw::Fn)?;
+        let (ident, decl, generics) = self.parse_fn_sig(ParamCfg {
+            is_self_allowed: false,
+            allow_c_variadic: true,
+            is_name_required: |_| true,
+        })?;
+        let span = lo.to(self.token.span);
+        self.parse_semi_or_incorrect_foreign_fn_body(&ident, extern_sp)?;
+        Ok(ast::ForeignItem {
+            ident,
+            attrs,
+            kind: ForeignItemKind::Fn(decl, generics),
+            id: DUMMY_NODE_ID,
+            span,
+            vis,
+        })
+    }
+
+    /// Parses a method or a macro invocation in a trait impl.
+    fn parse_impl_method(
+        &mut self,
+        at_end: &mut bool,
+    ) -> PResult<'a, (Ident, Vec<Attribute>, Generics, ImplItemKind)> {
+        let (ident, sig, generics) = self.parse_method_sig(|_| true)?;
+        *at_end = true;
+        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+        Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(sig, body)))
+    }
+
+    fn parse_trait_item_method(
+        &mut self,
+        at_end: &mut bool,
+        attrs: &mut Vec<Attribute>,
+    ) -> PResult<'a, (Ident, TraitItemKind, Generics)> {
+        // This is somewhat dubious; we don't want to allow
+        // argument names to be left off if there is a definition...
+        //
+        // We don't allow argument names to be left off in edition 2018.
+        let (ident, sig, generics) = self.parse_method_sig(|t| t.span.rust_2018())?;
+        let body = self.parse_trait_method_body(at_end, attrs)?;
+        Ok((ident, TraitItemKind::Method(sig, body), generics))
+    }
+
+    /// Parse the "body" of a method in a trait item definition.
+    /// This can either be `;` when there's no body,
+    /// or e.g. a block when the method is a provided one.
+    fn parse_trait_method_body(
+        &mut self,
+        at_end: &mut bool,
+        attrs: &mut Vec<Attribute>,
+    ) -> PResult<'a, Option<P<Block>>> {
+        Ok(match self.token.kind {
+            token::Semi => {
+                debug!("parse_trait_method_body(): parsing required method");
+                self.bump();
+                *at_end = true;
+                None
+            }
+            token::OpenDelim(token::Brace) => {
+                debug!("parse_trait_method_body(): parsing provided method");
+                *at_end = true;
+                let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+                attrs.extend(inner_attrs.iter().cloned());
+                Some(body)
+            }
+            token::Interpolated(ref nt) => {
+                match **nt {
+                    token::NtBlock(..) => {
+                        *at_end = true;
+                        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+                        attrs.extend(inner_attrs.iter().cloned());
+                        Some(body)
+                    }
+                    _ => return self.expected_semi_or_open_brace(),
+                }
+            }
+            _ => return self.expected_semi_or_open_brace(),
+        })
+    }
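// The two bodies distinguished above, written as ordinary trait methods; `Greet` and
// its method names are illustrative.
trait Greet {
    // `;` -> required method, body is `None`.
    fn name(&self) -> String;
    // A block -> provided method, body is `Some(block)`.
    fn greet(&self) {
        println!("hello, {}", self.name());
    }
}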
+
+    /// Parse the "signature", including the identifier, parameters, and generics
+    /// of a method. The body is not parsed as that differs between `trait`s and `impl`s.
+    fn parse_method_sig(
+        &mut self,
+        is_name_required: fn(&token::Token) -> bool,
+    ) -> PResult<'a, (Ident, MethodSig, Generics)> {
+        let header = self.parse_fn_front_matter()?;
+        let (ident, decl, generics) = self.parse_fn_sig(ParamCfg {
+            is_self_allowed: true,
+            allow_c_variadic: false,
+            is_name_required,
+        })?;
+        Ok((ident, MethodSig { header, decl }, generics))
+    }
+
+    /// Parses all the "front matter" for a `fn` declaration, up to
+    /// and including the `fn` keyword:
+    ///
+    /// - `const fn`
+    /// - `unsafe fn`
+    /// - `const unsafe fn`
+    /// - `extern fn`
+    /// - etc.
+    fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> {
+        let is_const_fn = self.eat_keyword(kw::Const);
+        let const_span = self.prev_span;
+        let asyncness = self.parse_asyncness();
+        if let IsAsync::Async { .. } = asyncness {
+            self.ban_async_in_2015(self.prev_span);
+        }
+        let asyncness = respan(self.prev_span, asyncness);
+        let unsafety = self.parse_unsafety();
+        let (constness, unsafety, abi) = if is_const_fn {
+            (respan(const_span, Constness::Const), unsafety, Abi::Rust)
+        } else {
+            let abi = self.parse_extern_abi()?;
+            (respan(self.prev_span, Constness::NotConst), unsafety, abi)
+        };
+        if !self.eat_keyword(kw::Fn) {
+            // It is possible for `expect_one_of` to recover given the contents of
+            // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
+            // account for this.
+            if !self.expect_one_of(&[], &[])? { unreachable!() }
+        }
+        Ok(FnHeader { constness, unsafety, asyncness, abi })
+    }
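// Ordinary headers covered by the front-matter parser above, up to and including `fn`.
// Note that the branch above only looks for `extern` when `const` is absent, so a
// `const` fn always ends up with `Abi::Rust` here.
const fn a() {}
unsafe fn b() {}
const unsafe fn c() {}
extern "C" fn d() {}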
+
+    /// Parse the "signature", including the identifier, parameters, and generics of a function.
+    fn parse_fn_sig(&mut self, cfg: ParamCfg) -> PResult<'a, (Ident, P<FnDecl>, Generics)> {
+        let ident = self.parse_ident()?;
+        let mut generics = self.parse_generics()?;
+        let decl = self.parse_fn_decl(cfg, true)?;
+        generics.where_clause = self.parse_where_clause()?;
+        Ok((ident, decl, generics))
+    }
+
+    /// Parses the parameter list and result type of a function declaration.
+    pub(super) fn parse_fn_decl(
+        &mut self,
+        cfg: ParamCfg,
+        ret_allow_plus: bool,
+    ) -> PResult<'a, P<FnDecl>> {
+        Ok(P(FnDecl {
+            inputs: self.parse_fn_params(cfg)?,
+            output: self.parse_ret_ty(ret_allow_plus)?,
+        }))
+    }
+
+    /// Parses the parameter list of a function, including the `(` and `)` delimiters.
+    fn parse_fn_params(&mut self, mut cfg: ParamCfg) -> PResult<'a, Vec<Param>> {
+        let sp = self.token.span;
+        let is_trait_item = cfg.is_self_allowed;
+        let mut c_variadic = false;
+        // Parse the arguments, starting out with `self` being possibly allowed...
+        let (params, _) = self.parse_paren_comma_seq(|p| {
+            let param = p.parse_param_general(&cfg, is_trait_item);
+            // ...now that we've parsed the first argument, `self` is no longer allowed.
+            cfg.is_self_allowed = false;
+
+            match param {
+                Ok(param) => Ok(
+                    if let TyKind::CVarArgs = param.ty.kind {
+                        c_variadic = true;
+                        if p.token != token::CloseDelim(token::Paren) {
+                            p.span_err(
+                                p.token.span,
+                                "`...` must be the last argument of a C-variadic function",
+                            );
+                            // FIXME(eddyb) this should probably still push `CVarArgs`.
+                            // Maybe AST validation/HIR lowering should emit the above error?
+                            None
+                        } else {
+                            Some(param)
+                        }
+                    } else {
+                        Some(param)
+                    }
+                ),
+                Err(mut e) => {
+                    e.emit();
+                    let lo = p.prev_span;
+                    // Skip every token until next possible arg or end.
+                    p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
+                    // Create a placeholder argument for proper arg count (issue #34264).
+                    let span = lo.to(p.prev_span);
+                    Ok(Some(dummy_arg(Ident::new(kw::Invalid, span))))
+                }
+            }
+        })?;
+
+        let mut params: Vec<_> = params.into_iter().filter_map(|x| x).collect();
+
+        // Replace duplicated recovered params with `_` pattern to avoid unnecessary errors.
+        self.deduplicate_recovered_params_names(&mut params);
+
+        if c_variadic && params.len() <= 1 {
+            self.span_err(
+                sp,
+                "C-variadic function must be declared with at least one named argument",
+            );
+        }
+
+        Ok(params)
+    }
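// What the C-variadic checks above mean in ordinary source; the commented-out
// declarations show the two rejected shapes and the errors they produce.
extern "C" {
    fn accepted(fmt: *const u8, ...);
    // fn rejected_alone(...);             // "at least one named argument" error
    // fn rejected_mid(x: u8, ..., y: u8); // "`...` must be the last argument" error
}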
+
+    /// Parses a single function parameter.
+    /// Skips unexpected attributes and doc comments in this position and emits an appropriate
+    /// error. Whether the parameter needs a name is decided by `cfg.is_name_required`.
+    fn parse_param_general(&mut self, cfg: &ParamCfg, is_trait_item: bool) -> PResult<'a, Param> {
+        let lo = self.token.span;
+        let attrs = self.parse_outer_attributes()?;
+
+        // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
+        if let Some(mut param) = self.parse_self_param()? {
+            param.attrs = attrs.into();
+            return if cfg.is_self_allowed {
+                Ok(param)
+            } else {
+                self.recover_bad_self_param(param, is_trait_item)
+            };
+        }
+
+        let is_name_required = match self.token.kind {
+            token::DotDotDot => false,
+            _ => (cfg.is_name_required)(&self.token),
+        };
+        let (pat, ty) = if is_name_required || self.is_named_param() {
+            debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
+
+            let pat = self.parse_fn_param_pat()?;
+            if let Err(mut err) = self.expect(&token::Colon) {
+                return if let Some(ident) = self.parameter_without_type(
+                    &mut err,
+                    pat,
+                    is_name_required,
+                    cfg.is_self_allowed,
+                    is_trait_item,
+                ) {
+                    err.emit();
+                    Ok(dummy_arg(ident))
+                } else {
+                    Err(err)
+                };
+            }
+
+            self.eat_incorrect_doc_comment_for_param_type();
+            (pat, self.parse_ty_common(true, true, cfg.allow_c_variadic)?)
+        } else {
+            debug!("parse_param_general ident_to_pat");
+            let parser_snapshot_before_ty = self.clone();
+            self.eat_incorrect_doc_comment_for_param_type();
+            let mut ty = self.parse_ty_common(true, true, cfg.allow_c_variadic);
+            if ty.is_ok() && self.token != token::Comma &&
+               self.token != token::CloseDelim(token::Paren) {
+                // This wasn't actually a type, but a pattern looking like a type,
+                // so we are going to rollback and re-parse for recovery.
+                ty = self.unexpected();
+            }
+            match ty {
+                Ok(ty) => {
+                    let ident = Ident::new(kw::Invalid, self.prev_span);
+                    let bm = BindingMode::ByValue(Mutability::Immutable);
+                    let pat = self.mk_pat_ident(ty.span, bm, ident);
+                    (pat, ty)
+                }
+                // If this is a C-variadic argument and we hit an error, return the error.
+                Err(err) if self.token == token::DotDotDot => return Err(err),
+                // Recover from attempting to parse the argument as a type without pattern.
+                Err(mut err) => {
+                    err.cancel();
+                    mem::replace(self, parser_snapshot_before_ty);
+                    self.recover_arg_parse()?
+                }
+            }
+        };
+
+        let span = lo.to(self.token.span);
+
+        Ok(Param {
+            attrs: attrs.into(),
+            id: ast::DUMMY_NODE_ID,
+            is_placeholder: false,
+            pat,
+            span,
+            ty,
+        })
+    }
+
+    /// Returns the parsed optional `self` parameter, if one is present.
+    ///
+    /// See `parse_self_param_with_attrs` to collect attributes.
+    fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
+        // Extract an identifier *after* having confirmed that the token is one.
+        let expect_self_ident = |this: &mut Self| {
+            match this.token.kind {
+                // Preserve hygienic context.
+                token::Ident(name, _) => {
+                    let span = this.token.span;
+                    this.bump();
+                    Ident::new(name, span)
+                }
+                _ => unreachable!(),
+            }
+        };
+        // Is `self` `n` tokens ahead?
+        let is_isolated_self = |this: &Self, n| {
+            this.is_keyword_ahead(n, &[kw::SelfLower])
+            && this.look_ahead(n + 1, |t| t != &token::ModSep)
+        };
+        // Is `mut self` `n` tokens ahead?
+        let is_isolated_mut_self = |this: &Self, n| {
+            this.is_keyword_ahead(n, &[kw::Mut])
+            && is_isolated_self(this, n + 1)
+        };
+        // Parse `self` or `self: TYPE`. We already know the current token is `self`.
+        let parse_self_possibly_typed = |this: &mut Self, m| {
+            let eself_ident = expect_self_ident(this);
+            let eself_hi = this.prev_span;
+            let eself = if this.eat(&token::Colon) {
+                SelfKind::Explicit(this.parse_ty()?, m)
+            } else {
+                SelfKind::Value(m)
+            };
+            Ok((eself, eself_ident, eself_hi))
+        };
+        // Recover for the grammar `*self`, `*const self`, and `*mut self`.
+        let recover_self_ptr = |this: &mut Self| {
+            let msg = "cannot pass `self` by raw pointer";
+            let span = this.token.span;
+            this.struct_span_err(span, msg)
+                .span_label(span, msg)
+                .emit();
+
+            Ok((SelfKind::Value(Mutability::Immutable), expect_self_ident(this), this.prev_span))
+        };
+
+        // Parse the optional `self` parameter of a method.
+        // Only a limited set of initial token sequences is considered a `self` parameter; anything
+        // else is parsed as a normal function parameter, so some lookahead is required.
+        let eself_lo = self.token.span;
+        let (eself, eself_ident, eself_hi) = match self.token.kind {
+            token::BinOp(token::And) => {
+                let eself = if is_isolated_self(self, 1) {
+                    // `&self`
+                    self.bump();
+                    SelfKind::Region(None, Mutability::Immutable)
+                } else if is_isolated_mut_self(self, 1) {
+                    // `&mut self`
+                    self.bump();
+                    self.bump();
+                    SelfKind::Region(None, Mutability::Mutable)
+                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) {
+                    // `&'lt self`
+                    self.bump();
+                    let lt = self.expect_lifetime();
+                    SelfKind::Region(Some(lt), Mutability::Immutable)
+                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) {
+                    // `&'lt mut self`
+                    self.bump();
+                    let lt = self.expect_lifetime();
+                    self.bump();
+                    SelfKind::Region(Some(lt), Mutability::Mutable)
+                } else {
+                    // `&not_self`
+                    return Ok(None);
+                };
+                (eself, expect_self_ident(self), self.prev_span)
+            }
+            // `*self`
+            token::BinOp(token::Star) if is_isolated_self(self, 1) => {
+                self.bump();
+                recover_self_ptr(self)?
+            }
+            // `*mut self` and `*const self`
+            token::BinOp(token::Star) if
+                self.look_ahead(1, |t| t.is_mutability())
+                && is_isolated_self(self, 2) =>
+            {
+                self.bump();
+                self.bump();
+                recover_self_ptr(self)?
+            }
+            // `self` and `self: TYPE`
+            token::Ident(..) if is_isolated_self(self, 0) => {
+                parse_self_possibly_typed(self, Mutability::Immutable)?
+            }
+            // `mut self` and `mut self: TYPE`
+            token::Ident(..) if is_isolated_mut_self(self, 0) => {
+                self.bump();
+                parse_self_possibly_typed(self, Mutability::Mutable)?
+            }
+            _ => return Ok(None),
+        };
+
+        let eself = source_map::respan(eself_lo.to(eself_hi), eself);
+        Ok(Some(Param::from_self(ThinVec::default(), eself, eself_ident)))
+    }
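// The `self` shapes the lookahead above recognizes, as ordinary methods; a leading `&`
// followed by anything else (the `&not_self` case) falls through and is parsed as a
// normal parameter. `S` and the method names are illustrative.
struct S;

impl S {
    fn a(self) {}
    fn b(mut self) {}
    fn c(&self) {}
    fn d(&mut self) {}
    fn e<'x>(&'x self) {}
    fn f<'x>(&'x mut self) {}
    fn g(self: Box<Self>) {} // `self: TYPE`
}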
+
+    fn is_named_param(&self) -> bool {
+        let offset = match self.token.kind {
+            token::Interpolated(ref nt) => match **nt {
+                token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
+                _ => 0,
+            }
+            token::BinOp(token::And) | token::AndAnd => 1,
+            _ if self.token.is_keyword(kw::Mut) => 1,
+            _ => 0,
+        };
+
+        self.look_ahead(offset, |t| t.is_ident()) &&
+        self.look_ahead(offset + 1, |t| t == &token::Colon)
+    }
+
+    fn recover_first_param(&mut self) -> &'static str {
+        match self.parse_outer_attributes()
+            .and_then(|_| self.parse_self_param())
+            .map_err(|mut e| e.cancel())
+        {
+            Ok(Some(_)) => "method",
+            _ => "function",
+        }
+    }
+}
diff --git a/src/libsyntax/parse/parser/module.rs b/src/libsyntax/parse/parser/module.rs
index 2d2fb487d7d..a0e4d2bbb7a 100644
--- a/src/libsyntax/parse/parser/module.rs
+++ b/src/libsyntax/parse/parser/module.rs
@@ -1,24 +1,24 @@
 use super::{Parser, PResult};
 use super::item::ItemInfo;
+use super::diagnostics::Error;
 
 use crate::attr;
 use crate::ast::{self, Ident, Attribute, ItemKind, Mod, Crate};
 use crate::parse::{new_sub_parser_from_file, DirectoryOwnership};
 use crate::parse::token::{self, TokenKind};
-use crate::parse::diagnostics::{Error};
 use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName};
 use crate::symbol::sym;
 
 use std::path::{self, Path, PathBuf};
 
 /// Information about the path to a module.
-pub struct ModulePath {
+pub(super) struct ModulePath {
     name: String,
     path_exists: bool,
     pub result: Result<ModulePathSuccess, Error>,
 }
 
-pub struct ModulePathSuccess {
+pub(super) struct ModulePathSuccess {
     pub path: PathBuf,
     pub directory_ownership: DirectoryOwnership,
     warn: bool,
@@ -39,6 +39,8 @@ impl<'a> Parser<'a> {
     /// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
     pub(super) fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
         let (in_cfg, outer_attrs) = {
+            // FIXME(Centril): This results in a cycle between config and parsing.
+            // Consider using dynamic dispatch via `self.sess` to disentangle the knot.
             let mut strip_unconfigured = crate::config::StripUnconfigured {
                 sess: self.sess,
                 features: None, // Don't perform gated feature checking.
@@ -198,7 +200,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
+    pub(super) fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
         if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) {
             let s = s.as_str();
 
@@ -215,7 +217,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Returns a path to a module.
-    pub fn default_submod_path(
+    pub(super) fn default_submod_path(
         id: ast::Ident,
         relative: Option<ast::Ident>,
         dir_path: &Path,
diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs
index 7eb2a73a11a..af795e51792 100644
--- a/src/libsyntax/parse/parser/pat.rs
+++ b/src/libsyntax/parse/parser/pat.rs
@@ -4,7 +4,7 @@ use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 use crate::ptr::P;
 use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac};
 use crate::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind};
-use crate::mut_visit::{noop_visit_pat, MutVisitor};
+use crate::mut_visit::{noop_visit_pat, noop_visit_mac, MutVisitor};
 use crate::parse::token::{self};
 use crate::print::pprust;
 use crate::source_map::{respan, Span, Spanned};
@@ -22,7 +22,7 @@ const WHILE_PARSING_OR_MSG: &str = "while parsing this or-pattern starting here"
 
 /// Whether or not an or-pattern should be gated when occurring in the current context.
 #[derive(PartialEq)]
-pub enum GateOr { Yes, No }
+pub(super) enum GateOr { Yes, No }
 
 /// Whether or not to recover a `,` when parsing or-patterns.
 #[derive(PartialEq, Copy, Clone)]
@@ -367,6 +367,7 @@ impl<'a> Parser<'a> {
 
         let pat = self.mk_pat(lo.to(self.prev_span), pat);
         let pat = self.maybe_recover_from_bad_qpath(pat, true)?;
+        let pat = self.recover_intersection_pat(pat)?;
 
         if !allow_range_pat {
             self.ban_pat_range_if_ambiguous(&pat)?
@@ -375,6 +376,65 @@ impl<'a> Parser<'a> {
         Ok(pat)
     }
 
+    /// Try to recover the more general form `intersect ::= $pat_lhs @ $pat_rhs`.
+    ///
+    /// Allowed binding patterns generated by `binding ::= ref? mut? $ident @ $pat_rhs`
+    /// should already have been parsed by this point; if the next token is `@`,
+    /// then we can try to parse the more general form.
+    ///
+    /// Consult `parse_pat_ident` for the `binding` grammar.
+    ///
+    /// The notion of intersection patterns is found in
+    /// e.g. [F#][and] where they are called AND-patterns.
+    ///
+    /// [and]: https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/pattern-matching
+    fn recover_intersection_pat(&mut self, lhs: P<Pat>) -> PResult<'a, P<Pat>> {
+        if self.token.kind != token::At {
+            // Next token is not `@` so it's not going to be an intersection pattern.
+            return Ok(lhs);
+        }
+
+        // At this point we attempt to parse `@ $pat_rhs` and emit an error.
+        self.bump(); // `@`
+        let mut rhs = self.parse_pat(None)?;
+        let sp = lhs.span.to(rhs.span);
+
+        if let PatKind::Ident(_, _, ref mut sub @ None) = rhs.kind {
+            // The user inverted the order, so help them fix that.
+            let mut applicability = Applicability::MachineApplicable;
+            lhs.walk(&mut |p| match p.kind {
+                // `check_match` is unhappy if the subpattern has a binding anywhere.
+                PatKind::Ident(..) => {
+                    applicability = Applicability::MaybeIncorrect;
+                    false // Short-circuit.
+                },
+                _ => true,
+            });
+
+            let lhs_span = lhs.span;
+            // Move the LHS into the RHS as a subpattern.
+            // The RHS is now the full pattern.
+            *sub = Some(lhs);
+
+            self.struct_span_err(sp, "pattern on wrong side of `@`")
+                .span_label(lhs_span, "pattern on the left, should be on the right")
+                .span_label(rhs.span, "binding on the right, should be on the left")
+                .span_suggestion(sp, "switch the order", pprust::pat_to_string(&rhs), applicability)
+                .emit();
+        } else {
+            // The special case above doesn't apply so we may have e.g. `A(x) @ B(y)`.
+            rhs.kind = PatKind::Wild;
+            self.struct_span_err(sp, "left-hand side of `@` must be a binding")
+                .span_label(lhs.span, "interpreted as a pattern, not a binding")
+                .span_label(rhs.span, "also a pattern")
+                .note("bindings are `x`, `mut x`, `ref x`, and `ref mut x`")
+                .emit();
+        }
+
+        rhs.span = sp;
+        Ok(rhs)
+    }
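// The recovery above in terms of user-written source; names are illustrative. The
// commented-out arms show the two rejected forms and how they are reported.
fn demo(x: i32) -> bool {
    match x {
        // 1..=5 @ n => n > 2,     // "pattern on wrong side of `@`", suggests `n @ 1..=5`
        n @ 1..=5 => n > 2,
        // 1..=3 @ 4..=5 => false, // "left-hand side of `@` must be a binding"
        _ => false,
    }
}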
+
     /// Ban a range pattern if it has an ambiguous interpretation.
     fn ban_pat_range_if_ambiguous(&self, pat: &Pat) -> PResult<'a, ()> {
         match pat.kind {
@@ -481,6 +541,10 @@ impl<'a> Parser<'a> {
     fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool {
         struct AddMut(bool);
         impl MutVisitor for AddMut {
+            fn visit_mac(&mut self, mac: &mut Mac) {
+                noop_visit_mac(mac, self);
+            }
+
             fn visit_pat(&mut self, pat: &mut P<Pat>) {
                 if let PatKind::Ident(BindingMode::ByValue(ref mut m @ Mutability::Immutable), ..)
                     = pat.kind
diff --git a/src/libsyntax/parse/parser/path.rs b/src/libsyntax/parse/parser/path.rs
index ca823991a2e..639d61a2b5c 100644
--- a/src/libsyntax/parse/parser/path.rs
+++ b/src/libsyntax/parse/parser/path.rs
@@ -111,7 +111,7 @@ impl<'a> Parser<'a> {
     /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
     /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
     /// attributes.
-    pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> {
+    fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> {
         let meta_ident = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref item) => match item.tokens.is_empty() {
@@ -129,7 +129,22 @@ impl<'a> Parser<'a> {
         self.parse_path(style)
     }
 
-    crate fn parse_path_segments(
+    /// Parse a list of paths inside `#[derive(path_0, ..., path_n)]`.
+    crate fn parse_derive_paths(&mut self) -> PResult<'a, Vec<Path>> {
+        self.expect(&token::OpenDelim(token::Paren))?;
+        let mut list = Vec::new();
+        while !self.eat(&token::CloseDelim(token::Paren)) {
+            let path = self.parse_path_allowing_meta(PathStyle::Mod)?;
+            list.push(path);
+            if !self.eat(&token::Comma) {
+                self.expect(&token::CloseDelim(token::Paren))?;
+                break
+            }
+        }
+        Ok(list)
+    }
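// The attribute shape this walks: a parenthesized, comma-separated list of paths after
// `derive`; a trailing comma is also tolerated by the loop above. The derives shown are
// ordinary standard-library ones.
#[derive(Clone, Copy, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}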
+
+    pub(super) fn parse_path_segments(
         &mut self,
         segments: &mut Vec<PathSegment>,
         style: PathStyle,
diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs
index 855b03ddd6f..d54d9c4b8e9 100644
--- a/src/libsyntax/parse/parser/stmt.rs
+++ b/src/libsyntax/parse/parser/stmt.rs
@@ -2,14 +2,13 @@ use super::{Parser, PResult, Restrictions, PrevTokenKind, SemiColonMode, BlockMo
 use super::expr::LhsExpr;
 use super::path::PathStyle;
 use super::pat::GateOr;
+use super::diagnostics::Error;
 
 use crate::ptr::P;
 use crate::{maybe_whole, ThinVec};
 use crate::ast::{self, DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
 use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter};
-use crate::ext::base::DummyResult;
 use crate::parse::{classify, DirectoryOwnership};
-use crate::parse::diagnostics::Error;
 use crate::parse::token;
 use crate::source_map::{respan, Span};
 use crate::symbol::{kw, sym};
@@ -373,7 +372,9 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a block. Inner attributes are allowed.
-    crate fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
+    pub(super) fn parse_inner_attrs_and_block(
+        &mut self
+    ) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
         maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
 
         let lo = self.token.span;
@@ -400,7 +401,7 @@ impl<'a> Parser<'a> {
                     self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
                     Some(Stmt {
                         id: DUMMY_NODE_ID,
-                        kind: StmtKind::Expr(DummyResult::raw_expr(self.token.span, true)),
+                        kind: StmtKind::Expr(self.mk_expr_err(self.token.span)),
                         span: self.token.span,
                     })
                 }
@@ -422,7 +423,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a statement, including the trailing semicolon.
-    crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
+    pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
         // Skip looking for a trailing semicolon when we have an interpolated statement.
         maybe_whole!(self, NtStmt, |x| Some(x));
 
@@ -443,7 +444,7 @@ impl<'a> Parser<'a> {
                         self.recover_stmt();
                         // Don't complain about type errors in body tail after parse error (#57383).
                         let sp = expr.span.to(self.prev_span);
-                        stmt.kind = StmtKind::Expr(DummyResult::raw_expr(sp, true));
+                        stmt.kind = StmtKind::Expr(self.mk_expr_err(sp));
                     }
                 }
             }
diff --git a/src/libsyntax/parse/parser/ty.rs b/src/libsyntax/parse/parser/ty.rs
index 41ee2a1599d..86c94b680b2 100644
--- a/src/libsyntax/parse/parser/ty.rs
+++ b/src/libsyntax/parse/parser/ty.rs
@@ -1,16 +1,15 @@
 use super::{Parser, PResult, PathStyle, PrevTokenKind, TokenType};
+use super::item::ParamCfg;
 
 use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath};
 use crate::ptr::P;
 use crate::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident};
 use crate::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef};
-use crate::ast::{Mutability, AnonConst, FnDecl, Mac};
+use crate::ast::{Mutability, AnonConst, Mac};
 use crate::parse::token::{self, Token};
 use crate::source_map::Span;
 use crate::symbol::{kw};
 
-use rustc_target::spec::abi::Abi;
-
 use errors::{Applicability, pluralise};
 
 /// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
@@ -211,7 +210,7 @@ impl<'a> Parser<'a> {
         };
 
         let span = lo.to(self.prev_span);
-        let ty = P(Ty { kind, span, id: ast::DUMMY_NODE_ID });
+        let ty = self.mk_ty(span, kind);
 
         // Try to recover from use of `+` with incorrect priority.
         self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
@@ -281,19 +280,14 @@ impl<'a> Parser<'a> {
         */
 
         let unsafety = self.parse_unsafety();
-        let abi = if self.eat_keyword(kw::Extern) {
-            self.parse_opt_abi()?.unwrap_or(Abi::C)
-        } else {
-            Abi::Rust
-        };
-
+        let abi = self.parse_extern_abi()?;
         self.expect_keyword(kw::Fn)?;
-        let inputs = self.parse_fn_params(false, true)?;
-        let ret_ty = self.parse_ret_ty(false)?;
-        let decl = P(FnDecl {
-            inputs,
-            output: ret_ty,
-        });
+        let cfg = ParamCfg {
+            is_self_allowed: false,
+            allow_c_variadic: true,
+            is_name_required: |_| false,
+        };
+        let decl = self.parse_fn_decl(cfg, false)?;
         Ok(TyKind::BareFn(P(BareFnTy {
             abi,
             unsafety,
@@ -302,7 +296,7 @@ impl<'a> Parser<'a> {
         })))
     }
 
-    crate fn parse_generic_bounds(&mut self,
+    pub(super) fn parse_generic_bounds(&mut self,
                                   colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
         self.parse_generic_bounds_common(true, colon_span)
     }
@@ -439,13 +433,13 @@ impl<'a> Parser<'a> {
         }
     }
 
-    crate fn check_lifetime(&mut self) -> bool {
+    pub fn check_lifetime(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Lifetime);
         self.token.is_lifetime()
     }
 
     /// Parses a single lifetime `'a` or panics.
-    crate fn expect_lifetime(&mut self) -> Lifetime {
+    pub fn expect_lifetime(&mut self) -> Lifetime {
         if let Some(ident) = self.token.lifetime() {
             let span = self.token.span;
             self.bump();
@@ -454,4 +448,8 @@ impl<'a> Parser<'a> {
             self.span_bug(self.token.span, "not a lifetime")
         }
     }
+
+    pub(super) fn mk_ty(&self, span: Span, kind: TyKind) -> P<Ty> {
+        P(Ty { kind, span, id: ast::DUMMY_NODE_ID })
+    }
 }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index fd78a2bd534..4a8b25c6107 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -4,16 +4,13 @@ pub use DelimToken::*;
 pub use LitKind::*;
 pub use TokenKind::*;
 
-use crate::ast::{self};
-use crate::parse::{parse_stream_from_source_str, ParseSess};
-use crate::print::pprust;
+use crate::ast;
 use crate::ptr::P;
 use crate::symbol::kw;
-use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
+use crate::tokenstream::TokenTree;
 
 use syntax_pos::symbol::Symbol;
-use syntax_pos::{self, Span, FileName, DUMMY_SP};
-use log::info;
+use syntax_pos::{self, Span, DUMMY_SP};
 
 use std::fmt;
 use std::mem;
@@ -36,7 +33,7 @@ pub enum BinOpToken {
 }
 
 /// A delimiter token.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum DelimToken {
     /// A round parenthesis (i.e., `(` or `)`).
     Paren,
@@ -288,7 +285,7 @@ impl TokenKind {
 }
 
 impl Token {
-    crate fn new(kind: TokenKind, span: Span) -> Self {
+    pub fn new(kind: TokenKind, span: Span) -> Self {
         Token { kind, span }
     }
 
@@ -298,12 +295,12 @@ impl Token {
     }
 
     /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
-    crate fn from_ast_ident(ident: ast::Ident) -> Self {
+    pub fn from_ast_ident(ident: ast::Ident) -> Self {
         Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span)
     }
 
     /// Return this token by value and leave a dummy token in its place.
-    crate fn take(&mut self) -> Self {
+    pub fn take(&mut self) -> Self {
         mem::replace(self, Token::dummy())
     }
 
@@ -324,7 +321,7 @@ impl Token {
     }
 
     /// Returns `true` if the token can appear at the start of an expression.
-    crate fn can_begin_expr(&self) -> bool {
+    pub fn can_begin_expr(&self) -> bool {
         match self.kind {
             Ident(name, is_raw)              =>
                 ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
@@ -356,7 +353,7 @@ impl Token {
     }
 
     /// Returns `true` if the token can appear at the start of a type.
-    crate fn can_begin_type(&self) -> bool {
+    pub fn can_begin_type(&self) -> bool {
         match self.kind {
             Ident(name, is_raw)        =>
                 ident_can_begin_type(name, self.span, is_raw), // type name or keyword
@@ -399,7 +396,7 @@ impl Token {
     }
 
     /// Returns `true` if the token is any literal
-    crate fn is_lit(&self) -> bool {
+    pub fn is_lit(&self) -> bool {
         match self.kind {
             Literal(..) => true,
             _           => false,
@@ -415,7 +412,7 @@ impl Token {
 
     /// Returns `true` if the token is any literal, a minus (which can prefix a literal,
     /// for example a '-42', or one of the boolean idents).
-    crate fn can_begin_literal_or_bool(&self) -> bool {
+    pub fn can_begin_literal_or_bool(&self) -> bool {
         match self.kind {
             Literal(..) | BinOp(Minus) => true,
             Ident(name, false) if name.is_bool_lit() => true,
@@ -737,131 +734,3 @@ impl fmt::Debug for Nonterminal {
         }
     }
 }
-
-impl Nonterminal {
-    pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
-        // A `Nonterminal` is often a parsed AST item. At this point we now
-        // need to convert the parsed AST to an actual token stream, e.g.
-        // un-parse it basically.
-        //
-        // Unfortunately there's not really a great way to do that in a
-        // guaranteed lossless fashion right now. The fallback here is to just
-        // stringify the AST node and reparse it, but this loses all span
-        // information.
-        //
-        // As a result, some AST nodes are annotated with the token stream they
-        // came from. Here we attempt to extract these lossless token streams
-        // before we fall back to the stringification.
-        let tokens = match *self {
-            Nonterminal::NtItem(ref item) => {
-                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-            }
-            Nonterminal::NtTraitItem(ref item) => {
-                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-            }
-            Nonterminal::NtImplItem(ref item) => {
-                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-            }
-            Nonterminal::NtIdent(ident, is_raw) => {
-                Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
-            }
-            Nonterminal::NtLifetime(ident) => {
-                Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
-            }
-            Nonterminal::NtTT(ref tt) => {
-                Some(tt.clone().into())
-            }
-            _ => None,
-        };
-
-        // FIXME(#43081): Avoid this pretty-print + reparse hack
-        let source = pprust::nonterminal_to_string(self);
-        let filename = FileName::macro_expansion_source_code(&source);
-        let tokens_for_real = parse_stream_from_source_str(filename, source, sess, Some(span));
-
-        // During early phases of the compiler the AST could get modified
-        // directly (e.g., attributes added or removed) and the internal cache
-        // of tokens my not be invalidated or updated. Consequently if the
-        // "lossless" token stream disagrees with our actual stringification
-        // (which has historically been much more battle-tested) then we go
-        // with the lossy stream anyway (losing span information).
-        //
-        // Note that the comparison isn't `==` here to avoid comparing spans,
-        // but it *also* is a "probable" equality which is a pretty weird
-        // definition. We mostly want to catch actual changes to the AST
-        // like a `#[cfg]` being processed or some weird `macro_rules!`
-        // expansion.
-        //
-        // What we *don't* want to catch is the fact that a user-defined
-        // literal like `0xf` is stringified as `15`, causing the cached token
-        // stream to not be literal `==` token-wise (ignoring spans) to the
-        // token stream we got from stringification.
-        //
-        // Instead the "probably equal" check here is "does each token
-        // recursively have the same discriminant?" We basically don't look at
-        // the token values here and assume that such fine grained token stream
-        // modifications, including adding/removing typically non-semantic
-        // tokens such as extra braces and commas, don't happen.
-        if let Some(tokens) = tokens {
-            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
-                return tokens
-            }
-            info!("cached tokens found, but they're not \"probably equal\", \
-                   going with stringified version");
-        }
-        return tokens_for_real
-    }
-}
-
-fn prepend_attrs(sess: &ParseSess,
-                 attrs: &[ast::Attribute],
-                 tokens: Option<&tokenstream::TokenStream>,
-                 span: syntax_pos::Span)
-    -> Option<tokenstream::TokenStream>
-{
-    let tokens = tokens?;
-    if attrs.len() == 0 {
-        return Some(tokens.clone())
-    }
-    let mut builder = tokenstream::TokenStreamBuilder::new();
-    for attr in attrs {
-        assert_eq!(attr.style, ast::AttrStyle::Outer,
-                   "inner attributes should prevent cached tokens from existing");
-
-        let source = pprust::attribute_to_string(attr);
-        let macro_filename = FileName::macro_expansion_source_code(&source);
-        if attr.is_sugared_doc {
-            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-            builder.push(stream);
-            continue
-        }
-
-        // synthesize # [ $path $tokens ] manually here
-        let mut brackets = tokenstream::TokenStreamBuilder::new();
-
-        // For simple paths, push the identifier directly
-        if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
-            let ident = attr.path.segments[0].ident;
-            let token = Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(token, ident.span));
-
-        // ... and for more complicated paths, fall back to a reparse hack that
-        // should eventually be removed.
-        } else {
-            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-            brackets.push(stream);
-        }
-
-        brackets.push(attr.tokens.clone());
-
-        // The span we list here for `#` and for `[ ... ]` are both wrong in
-        // that it encompasses more than each token, but it hopefully is "good
-        // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(Pound, attr.span));
-        let delim_span = DelimSpan::from_single(attr.span);
-        builder.push(tokenstream::TokenTree::Delimited(
-            delim_span, DelimToken::Bracket, brackets.build().into()));
-    }
-    builder.push(tokens.clone());
-    Some(builder.build())
-}