about summary refs log tree commit diff
path: root/src/libsyntax
diff options
context:
space:
mode:
author:    bors <bors@rust-lang.org>  2019-09-24 21:46:26 +0000
committer: bors <bors@rust-lang.org>  2019-09-24 21:46:26 +0000
commit:    dcd473d7b554a82013913244da8aba1e22a002a9 (patch)
tree:      e3d95102edba19987d74cf0c961be6c86d5d8cab /src/libsyntax
parent:    6ef275e6c3cb1384ec78128eceeb4963ff788dca (diff)
parent:    aeb24142be8f39975c72a63f57c70d0adc6cc7a6 (diff)
download:  rust-dcd473d7b554a82013913244da8aba1e22a002a9.tar.gz
           rust-dcd473d7b554a82013913244da8aba1e22a002a9.zip
Auto merge of #64751 - Centril:rollup-hpbmcfj, r=Centril
Rollup of 16 pull requests

Successful merges:

 - #63356 (Issue#63183: Add fs::read_dir() and ReadDir warning about iterator order + example)
 - #63934 (Fix coherence checking for impl trait in type aliases)
 - #64016 (Streamline `Compiler`)
 - #64296 (Document the unstable iter_order_by library feature)
 - #64443 (rustdoc: general cleanup)
 - #64622 (Add a cycle detector for generic `Graph`s and `mir::Body`s)
 - #64689 (Refactor macro by example)
 - #64698 (Recover on `const X = 42;` and infer type + Error Stash API)
 - #64702 (Remove unused dependencies)
 - #64717 (update mem::discriminant test to use assert_eq and assert_ne over comparison operators)
 - #64720 ( remove rtp.rs, and move rtpSpawn and RTP_ID_ERROR to libc)
 - #64721 (Fixed issue from #64447)
 - #64725 (fix one typo)
 - #64737 (fix several issues in String docs)
 - #64742 (relnotes: make compatibility section more sterile and fix rustc version)
 - #64748 (Fix #64744. Account for the Zero sub-pattern case.)

Failed merges:

r? @ghost
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/Cargo.toml                                                     |   1
-rw-r--r--  src/libsyntax/ext/base.rs                                                    |   4
-rw-r--r--  src/libsyntax/ext/expand.rs                                                  |   8
-rw-r--r--  src/libsyntax/ext/mbe.rs                                                     | 166
-rw-r--r--  src/libsyntax/ext/mbe/macro_check.rs (renamed from src/libsyntax/ext/tt/macro_check.rs)   |   4
-rw-r--r--  src/libsyntax/ext/mbe/macro_parser.rs (renamed from src/libsyntax/ext/tt/macro_parser.rs) |  26
-rw-r--r--  src/libsyntax/ext/mbe/macro_rules.rs (renamed from src/libsyntax/ext/tt/macro_rules.rs)   | 153
-rw-r--r--  src/libsyntax/ext/mbe/quoted.rs (renamed from src/libsyntax/ext/tt/quoted.rs)             | 172
-rw-r--r--  src/libsyntax/ext/mbe/transcribe.rs (renamed from src/libsyntax/ext/tt/transcribe.rs)     |  36
-rw-r--r--  src/libsyntax/lib.rs                                                         |   9
-rw-r--r--  src/libsyntax/parse/parser/item.rs                                           |  47
-rw-r--r--  src/libsyntax/tokenstream.rs                                                 |  15
12 files changed, 340 insertions, 301 deletions
diff --git a/src/libsyntax/Cargo.toml b/src/libsyntax/Cargo.toml
index d4a9acc1569..196cf4d9dfa 100644
--- a/src/libsyntax/Cargo.toml
+++ b/src/libsyntax/Cargo.toml
@@ -19,6 +19,5 @@ syntax_pos = { path = "../libsyntax_pos" }
 errors = { path = "../librustc_errors", package = "rustc_errors" }
 rustc_data_structures = { path = "../librustc_data_structures" }
 rustc_lexer = { path = "../librustc_lexer" }
-rustc_macros = { path = "../librustc_macros" }
 rustc_target = { path = "../librustc_target" }
 smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index e189d8f8636..aa76667c2e9 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -1017,10 +1017,6 @@ impl<'a> ExtCtxt<'a> {
     pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) {
         self.parse_sess.span_diagnostic.span_err_with_code(sp, msg, code);
     }
-    pub fn mut_span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str)
-                        -> DiagnosticBuilder<'a> {
-        self.parse_sess.span_diagnostic.mut_span_err(sp, msg)
-    }
     pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
         self.parse_sess.span_diagnostic.span_warn(sp, msg);
     }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index b80c530731d..c8078d2bb71 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -6,7 +6,7 @@ use crate::config::StripUnconfigured;
 use crate::ext::base::*;
 use crate::ext::proc_macro::{collect_derives, MarkAttrs};
 use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind};
-use crate::ext::tt::macro_rules::annotate_err_with_kind;
+use crate::ext::mbe::macro_rules::annotate_err_with_kind;
 use crate::ext::placeholders::{placeholder, PlaceholderExpander};
 use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
 use crate::mut_visit::*;
@@ -115,8 +115,8 @@ macro_rules! ast_fragments {
             }
         }
 
-        impl<'a> MacResult for crate::ext::tt::macro_rules::ParserAnyMacro<'a> {
-            $(fn $make_ast(self: Box<crate::ext::tt::macro_rules::ParserAnyMacro<'a>>)
+        impl<'a> MacResult for crate::ext::mbe::macro_rules::ParserAnyMacro<'a> {
+            $(fn $make_ast(self: Box<crate::ext::mbe::macro_rules::ParserAnyMacro<'a>>)
                            -> Option<$AstTy> {
                 Some(self.make(AstFragmentKind::$Kind).$make_ast())
             })*
@@ -384,7 +384,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         let attr = attr::find_by_name(item.attrs(), sym::derive)
                             .expect("`derive` attribute should exist");
                         let span = attr.span;
-                        let mut err = self.cx.mut_span_err(span,
+                        let mut err = self.cx.struct_span_err(span,
                             "`derive` may only be applied to structs, enums and unions");
                         if let ast::AttrStyle::Inner = attr.style {
                             let trait_list = derives.iter()
diff --git a/src/libsyntax/ext/mbe.rs b/src/libsyntax/ext/mbe.rs
new file mode 100644
index 00000000000..a87da791c9b
--- /dev/null
+++ b/src/libsyntax/ext/mbe.rs
@@ -0,0 +1,166 @@
+//! This module implements declarative macros: old `macro_rules` and the newer
+//! `macro`. Declarative macros are also known as "macro by example", and that's
+//! why we call this module `mbe`. For external documentation, prefer the
+//! official terminology: "declarative macros".
+
+crate mod transcribe;
+crate mod macro_check;
+crate mod macro_parser;
+crate mod macro_rules;
+crate mod quoted;
+
+use crate::ast;
+use crate::parse::token::{self, Token, TokenKind};
+use crate::tokenstream::{DelimSpan};
+
+use syntax_pos::{BytePos, Span};
+
+use rustc_data_structures::sync::Lrc;
+
+/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
+/// that the delimiter itself might be `NoDelim`.
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct Delimited {
+    delim: token::DelimToken,
+    tts: Vec<TokenTree>,
+}
+
+impl Delimited {
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
+    fn open_tt(&self, span: Span) -> TokenTree {
+        let open_span = if span.is_dummy() {
+            span
+        } else {
+            span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
+        };
+        TokenTree::token(token::OpenDelim(self.delim), open_span)
+    }
+
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
+    fn close_tt(&self, span: Span) -> TokenTree {
+        let close_span = if span.is_dummy() {
+            span
+        } else {
+            span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
+        };
+        TokenTree::token(token::CloseDelim(self.delim), close_span)
+    }
+}
+
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct SequenceRepetition {
+    /// The sequence of token trees
+    tts: Vec<TokenTree>,
+    /// The optional separator
+    separator: Option<Token>,
+    /// Whether the sequence can be repeated zero (*), or one or more times (+)
+    kleene: KleeneToken,
+    /// The number of `Match`s that appear in the sequence (and subsequences)
+    num_captures: usize,
+}
+
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
+struct KleeneToken {
+    span: Span,
+    op: KleeneOp,
+}
+
+impl KleeneToken {
+    fn new(op: KleeneOp, span: Span) -> KleeneToken {
+        KleeneToken { span, op }
+    }
+}
+
+/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
+/// for token sequences.
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+enum KleeneOp {
+    /// Kleene star (`*`) for zero or more repetitions
+    ZeroOrMore,
+    /// Kleene plus (`+`) for one or more repetitions
+    OneOrMore,
+    /// Kleene optional (`?`) for zero or one repetitions
+    ZeroOrOne,
+}
+
+/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
+/// are "first-class" token trees. Useful for parsing macros.
+#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
+enum TokenTree {
+    Token(Token),
+    Delimited(DelimSpan, Lrc<Delimited>),
+    /// A kleene-style repetition sequence
+    Sequence(DelimSpan, Lrc<SequenceRepetition>),
+    /// e.g., `$var`
+    MetaVar(Span, ast::Ident),
+    /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
+    MetaVarDecl(
+        Span,
+        ast::Ident, /* name to bind */
+        ast::Ident, /* kind of nonterminal */
+    ),
+}
+
+impl TokenTree {
+    /// Return the number of tokens in the tree.
+    fn len(&self) -> usize {
+        match *self {
+            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+                token::NoDelim => delimed.tts.len(),
+                _ => delimed.tts.len() + 2,
+            },
+            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
+            _ => 0,
+        }
+    }
+
+    /// Returns `true` if the given token tree is delimited.
+    fn is_delimited(&self) -> bool {
+        match *self {
+            TokenTree::Delimited(..) => true,
+            _ => false,
+        }
+    }
+
+    /// Returns `true` if the given token tree is a token of the given kind.
+    fn is_token(&self, expected_kind: &TokenKind) -> bool {
+        match self {
+            TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
+            _ => false,
+        }
+    }
+
+    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
+    fn get_tt(&self, index: usize) -> TokenTree {
+        match (self, index) {
+            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
+                delimed.tts[index].clone()
+            }
+            (&TokenTree::Delimited(span, ref delimed), _) => {
+                if index == 0 {
+                    return delimed.open_tt(span.open);
+                }
+                if index == delimed.tts.len() + 1 {
+                    return delimed.close_tt(span.close);
+                }
+                delimed.tts[index - 1].clone()
+            }
+            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
+            _ => panic!("Cannot expand a token tree"),
+        }
+    }
+
+    /// Retrieves the `TokenTree`'s span.
+    fn span(&self) -> Span {
+        match *self {
+            TokenTree::Token(Token { span, .. })
+            | TokenTree::MetaVar(span, _)
+            | TokenTree::MetaVarDecl(span, _, _) => span,
+            TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
+        }
+    }
+
+    fn token(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span))
+    }
+}
diff --git a/src/libsyntax/ext/tt/macro_check.rs b/src/libsyntax/ext/mbe/macro_check.rs
index 5af97199902..97074f5cbe4 100644
--- a/src/libsyntax/ext/tt/macro_check.rs
+++ b/src/libsyntax/ext/mbe/macro_check.rs
@@ -106,7 +106,7 @@
 //! bound.
 use crate::ast::NodeId;
 use crate::early_buffered_lints::BufferedEarlyLintId;
-use crate::ext::tt::quoted::{KleeneToken, TokenTree};
+use crate::ext::mbe::{KleeneToken, TokenTree};
 use crate::parse::token::TokenKind;
 use crate::parse::token::{DelimToken, Token};
 use crate::parse::ParseSess;
@@ -196,7 +196,7 @@ struct MacroState<'a> {
 /// - `node_id` is used to emit lints
 /// - `span` is used when no spans are available
 /// - `lhses` and `rhses` should have the same length and represent the macro definition
-pub fn check_meta_variables(
+pub(super) fn check_meta_variables(
     sess: &ParseSess,
     node_id: NodeId,
     span: Span,
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/mbe/macro_parser.rs
index dbf14daa30e..b51384d3b15 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/mbe/macro_parser.rs
@@ -70,12 +70,12 @@
 //! eof: [a $( a )* a b ·]
 //! ```
 
-pub use NamedMatch::*;
-pub use ParseResult::*;
+crate use NamedMatch::*;
+crate use ParseResult::*;
 use TokenTreeOrTokenTreeSlice::*;
 
 use crate::ast::{Ident, Name};
-use crate::ext::tt::quoted::{self, TokenTree};
+use crate::ext::mbe::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
 use crate::parse::token::{self, DocComment, Nonterminal, Token};
@@ -195,7 +195,7 @@ struct MatcherPos<'root, 'tt> {
     // `None`.
 
     /// The KleeneOp of this sequence if we are in a repetition.
-    seq_op: Option<quoted::KleeneOp>,
+    seq_op: Option<mbe::KleeneOp>,
 
     /// The separator if we are in a repetition.
     sep: Option<Token>,
@@ -267,7 +267,7 @@ impl<'root, 'tt> DerefMut for MatcherPosHandle<'root, 'tt> {
 }
 
 /// Represents the possible results of an attempted parse.
-pub enum ParseResult<T> {
+crate enum ParseResult<T> {
     /// Parsed successfully.
     Success(T),
     /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
@@ -279,10 +279,10 @@ pub enum ParseResult<T> {
 
 /// A `ParseResult` where the `Success` variant contains a mapping of `Ident`s to `NamedMatch`es.
 /// This represents the mapping of metavars to the token trees they bind to.
-pub type NamedParseResult = ParseResult<FxHashMap<Ident, NamedMatch>>;
+crate type NamedParseResult = ParseResult<FxHashMap<Ident, NamedMatch>>;
 
 /// Count how many metavars are named in the given matcher `ms`.
-pub fn count_names(ms: &[TokenTree]) -> usize {
+pub(super) fn count_names(ms: &[TokenTree]) -> usize {
     ms.iter().fold(0, |count, elt| {
         count + match *elt {
             TokenTree::Sequence(_, ref seq) => seq.num_captures,
@@ -352,7 +352,7 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP
 /// only on the nesting depth of `ast::TTSeq`s in the originating
 /// token tree it was derived from.
 #[derive(Debug, Clone)]
-pub enum NamedMatch {
+crate enum NamedMatch {
     MatchedSeq(Lrc<NamedMatchVec>, DelimSpan),
     MatchedNonterminal(Lrc<Nonterminal>),
 }
@@ -415,7 +415,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
 
 /// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
 /// other tokens, this is "unexpected token...".
-pub fn parse_failure_msg(tok: &Token) -> String {
+crate fn parse_failure_msg(tok: &Token) -> String {
     match tok.kind {
         token::Eof => "unexpected end of macro invocation".to_string(),
         _ => format!(
@@ -532,7 +532,7 @@ fn inner_parse_loop<'root, 'tt>(
                 }
                 // We don't need a separator. Move the "dot" back to the beginning of the matcher
                 // and try to match again UNLESS we are only allowed to have _one_ repetition.
-                else if item.seq_op != Some(quoted::KleeneOp::ZeroOrOne) {
+                else if item.seq_op != Some(mbe::KleeneOp::ZeroOrOne) {
                     item.match_cur = item.match_lo;
                     item.idx = 0;
                     cur_items.push(item);
@@ -555,8 +555,8 @@ fn inner_parse_loop<'root, 'tt>(
                     // implicitly disallowing OneOrMore from having 0 matches here. Thus, that will
                     // result in a "no rules expected token" error by virtue of this matcher not
                     // working.
-                    if seq.kleene.op == quoted::KleeneOp::ZeroOrMore
-                        || seq.kleene.op == quoted::KleeneOp::ZeroOrOne
+                    if seq.kleene.op == mbe::KleeneOp::ZeroOrMore
+                        || seq.kleene.op == mbe::KleeneOp::ZeroOrOne
                     {
                         let mut new_item = item.clone();
                         new_item.match_cur += seq.num_captures;
@@ -648,7 +648,7 @@ fn inner_parse_loop<'root, 'tt>(
 /// - `directory`: Information about the file locations (needed for the black-box parser)
 /// - `recurse_into_modules`: Whether or not to recurse into modules (needed for the black-box
 ///   parser)
-pub fn parse(
+pub(super) fn parse(
     sess: &ParseSess,
     tts: TokenStream,
     ms: &[TokenTree],
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/mbe/macro_rules.rs
index b27e9c54337..816baadb12f 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/mbe/macro_rules.rs
@@ -4,12 +4,12 @@ use crate::edition::Edition;
 use crate::ext::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
 use crate::ext::base::{SyntaxExtension, SyntaxExtensionKind};
 use crate::ext::expand::{AstFragment, AstFragmentKind};
-use crate::ext::tt::macro_check;
-use crate::ext::tt::macro_parser::{parse, parse_failure_msg};
-use crate::ext::tt::macro_parser::{Error, Failure, Success};
-use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq};
-use crate::ext::tt::quoted;
-use crate::ext::tt::transcribe::transcribe;
+use crate::ext::mbe;
+use crate::ext::mbe::macro_check;
+use crate::ext::mbe::macro_parser::{parse, parse_failure_msg};
+use crate::ext::mbe::macro_parser::{Error, Failure, Success};
+use crate::ext::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedParseResult};
+use crate::ext::mbe::transcribe::transcribe;
 use crate::feature_gate::Features;
 use crate::parse::parser::Parser;
 use crate::parse::token::TokenKind::*;
@@ -35,7 +35,7 @@ const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
                                         `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
                                         `literal`, `path`, `meta`, `tt`, `item` and `vis`";
 
-pub struct ParserAnyMacro<'a> {
+crate struct ParserAnyMacro<'a> {
     parser: Parser<'a>,
 
     /// Span of the expansion site of the macro this parser is for
@@ -45,7 +45,11 @@ pub struct ParserAnyMacro<'a> {
     arm_span: Span,
 }
 
-pub fn annotate_err_with_kind(err: &mut DiagnosticBuilder<'_>, kind: AstFragmentKind, span: Span) {
+crate fn annotate_err_with_kind(
+    err: &mut DiagnosticBuilder<'_>,
+    kind: AstFragmentKind,
+    span: Span,
+) {
     match kind {
         AstFragmentKind::Ty => {
             err.span_label(span, "this macro call doesn't expand to a type");
@@ -58,7 +62,7 @@ pub fn annotate_err_with_kind(err: &mut DiagnosticBuilder<'_>, kind: AstFragment
 }
 
 impl<'a> ParserAnyMacro<'a> {
-    pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
+    crate fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
         let ParserAnyMacro { site_span, macro_ident, ref mut parser, arm_span } = *self;
         let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| {
             if parser.token == token::Eof && e.message().ends_with(", found `<eof>`") {
@@ -131,8 +135,8 @@ struct MacroRulesMacroExpander {
     name: ast::Ident,
     span: Span,
     transparency: Transparency,
-    lhses: Vec<quoted::TokenTree>,
-    rhses: Vec<quoted::TokenTree>,
+    lhses: Vec<mbe::TokenTree>,
+    rhses: Vec<mbe::TokenTree>,
     valid: bool,
 }
 
@@ -165,8 +169,8 @@ fn generic_extension<'cx>(
     name: ast::Ident,
     transparency: Transparency,
     arg: TokenStream,
-    lhses: &[quoted::TokenTree],
-    rhses: &[quoted::TokenTree],
+    lhses: &[mbe::TokenTree],
+    rhses: &[mbe::TokenTree],
 ) -> Box<dyn MacResult + 'cx> {
     if cx.trace_macros() {
         trace_macros_note(cx, sp, format!("expanding `{}! {{ {} }}`", name, arg));
@@ -178,7 +182,7 @@ fn generic_extension<'cx>(
     for (i, lhs) in lhses.iter().enumerate() {
         // try each arm's matchers
         let lhs_tt = match *lhs {
-            quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+            mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
             _ => cx.span_bug(sp, "malformed macro lhs"),
         };
 
@@ -186,7 +190,7 @@ fn generic_extension<'cx>(
             Success(named_matches) => {
                 let rhs = match rhses[i] {
                     // ignore delimiters
-                    quoted::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
+                    mbe::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
                     _ => cx.span_bug(sp, "malformed macro rhs"),
                 };
                 let arm_span = rhses[i].span();
@@ -254,7 +258,7 @@ fn generic_extension<'cx>(
         for lhs in lhses {
             // try each arm's matchers
             let lhs_tt = match *lhs {
-                quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+                mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
                 _ => continue,
             };
             match TokenTree::parse(cx, lhs_tt, arg.clone()) {
@@ -284,8 +288,8 @@ fn generic_extension<'cx>(
 //
 // Holy self-referential!
 
-/// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(
+/// Converts a macro item into a syntax extension.
+pub fn compile_declarative_macro(
     sess: &ParseSess,
     features: &Features,
     def: &ast::Item,
@@ -308,32 +312,32 @@ pub fn compile(
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
     let argument_gram = vec![
-        quoted::TokenTree::Sequence(
+        mbe::TokenTree::Sequence(
             DelimSpan::dummy(),
-            Lrc::new(quoted::SequenceRepetition {
+            Lrc::new(mbe::SequenceRepetition {
                 tts: vec![
-                    quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
-                    quoted::TokenTree::token(token::FatArrow, def.span),
-                    quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
+                    mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
+                    mbe::TokenTree::token(token::FatArrow, def.span),
+                    mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
                 ],
                 separator: Some(Token::new(
                     if body.legacy { token::Semi } else { token::Comma },
                     def.span,
                 )),
-                kleene: quoted::KleeneToken::new(quoted::KleeneOp::OneOrMore, def.span),
+                kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
                 num_captures: 2,
             }),
         ),
         // to phase into semicolon-termination instead of semicolon-separation
-        quoted::TokenTree::Sequence(
+        mbe::TokenTree::Sequence(
             DelimSpan::dummy(),
-            Lrc::new(quoted::SequenceRepetition {
-                tts: vec![quoted::TokenTree::token(
+            Lrc::new(mbe::SequenceRepetition {
+                tts: vec![mbe::TokenTree::token(
                     if body.legacy { token::Semi } else { token::Comma },
                     def.span,
                 )],
                 separator: None,
-                kleene: quoted::KleeneToken::new(quoted::KleeneOp::ZeroOrMore, def.span),
+                kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
                 num_captures: 0,
             }),
         ),
@@ -363,7 +367,7 @@ pub fn compile(
             .map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
-                        let tt = quoted::parse(
+                        let tt = mbe::quoted::parse(
                             tt.clone().into(),
                             true,
                             sess,
@@ -380,7 +384,7 @@ pub fn compile(
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
             })
-            .collect::<Vec<quoted::TokenTree>>(),
+            .collect::<Vec<mbe::TokenTree>>(),
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
     };
 
@@ -390,7 +394,7 @@ pub fn compile(
             .map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
-                        return quoted::parse(
+                        return mbe::quoted::parse(
                             tt.clone().into(),
                             false,
                             sess,
@@ -405,7 +409,7 @@ pub fn compile(
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
             })
-            .collect::<Vec<quoted::TokenTree>>(),
+            .collect::<Vec<mbe::TokenTree>>(),
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs"),
     };
 
@@ -450,11 +454,11 @@ fn check_lhs_nt_follows(
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
-    lhs: &quoted::TokenTree,
+    lhs: &mbe::TokenTree,
 ) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-    if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
+    if let mbe::TokenTree::Delimited(_, ref tts) = *lhs {
         check_matcher(sess, features, attrs, &tts.tts)
     } else {
         let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
@@ -467,8 +471,8 @@ fn check_lhs_nt_follows(
 
 /// Checks that the lhs contains no repetition which could match an empty token
 /// tree, because then the matcher would hang indefinitely.
-fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
-    use quoted::TokenTree;
+fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
+    use mbe::TokenTree;
     for tt in tts {
         match *tt {
             TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
@@ -482,8 +486,8 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
                     && seq.tts.iter().all(|seq_tt| match *seq_tt {
                         TokenTree::MetaVarDecl(_, _, id) => id.name == sym::vis,
                         TokenTree::Sequence(_, ref sub_seq) => {
-                            sub_seq.kleene.op == quoted::KleeneOp::ZeroOrMore
-                                || sub_seq.kleene.op == quoted::KleeneOp::ZeroOrOne
+                            sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
+                                || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
                         }
                         _ => false,
                     })
@@ -502,9 +506,9 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
     true
 }
 
-fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
+fn check_rhs(sess: &ParseSess, rhs: &mbe::TokenTree) -> bool {
     match *rhs {
-        quoted::TokenTree::Delimited(..) => return true,
+        mbe::TokenTree::Delimited(..) => return true,
         _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited"),
     }
     false
@@ -514,7 +518,7 @@ fn check_matcher(
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
-    matcher: &[quoted::TokenTree],
+    matcher: &[mbe::TokenTree],
 ) -> bool {
     let first_sets = FirstSets::new(matcher);
     let empty_suffix = TokenSet::empty();
@@ -546,8 +550,8 @@ struct FirstSets {
 }
 
 impl FirstSets {
-    fn new(tts: &[quoted::TokenTree]) -> FirstSets {
-        use quoted::TokenTree;
+    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
+        use mbe::TokenTree;
 
         let mut sets = FirstSets { first: FxHashMap::default() };
         build_recur(&mut sets, tts);
@@ -594,8 +598,8 @@ impl FirstSets {
 
                         // Reverse scan: Sequence comes before `first`.
                         if subfirst.maybe_empty
-                            || seq_rep.kleene.op == quoted::KleeneOp::ZeroOrMore
-                            || seq_rep.kleene.op == quoted::KleeneOp::ZeroOrOne
+                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
+                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                         {
                             // If sequence is potentially empty, then
                             // union them (preserving first emptiness).
@@ -615,8 +619,8 @@ impl FirstSets {
 
     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
-    fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
-        use quoted::TokenTree;
+    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
+        use mbe::TokenTree;
 
         let mut first = TokenSet::empty();
         for tt in tts.iter() {
@@ -652,8 +656,8 @@ impl FirstSets {
                     assert!(first.maybe_empty);
                     first.add_all(subfirst);
                     if subfirst.maybe_empty
-                        || seq_rep.kleene.op == quoted::KleeneOp::ZeroOrMore
-                        || seq_rep.kleene.op == quoted::KleeneOp::ZeroOrOne
+                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
+                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                     {
                         // Continue scanning for more first
                         // tokens, but also make sure we
@@ -674,7 +678,7 @@ impl FirstSets {
     }
 }
 
-// A set of `quoted::TokenTree`s, which may include `TokenTree::Match`s
+// A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
 // (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can
 // match an empty token sequence.
@@ -686,7 +690,7 @@ impl FirstSets {
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
 struct TokenSet {
-    tokens: Vec<quoted::TokenTree>,
+    tokens: Vec<mbe::TokenTree>,
     maybe_empty: bool,
 }
 
@@ -698,13 +702,13 @@ impl TokenSet {
 
     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
-    fn singleton(tok: quoted::TokenTree) -> Self {
+    fn singleton(tok: mbe::TokenTree) -> Self {
         TokenSet { tokens: vec![tok], maybe_empty: false }
     }
 
     // Changes self to be the set `{ tok }`.
     // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: quoted::TokenTree) {
+    fn replace_with(&mut self, tok: mbe::TokenTree) {
         self.tokens.clear();
         self.tokens.push(tok);
         self.maybe_empty = false;
@@ -719,7 +723,7 @@ impl TokenSet {
     }
 
     // Adds `tok` to the set for `self`, marking sequence as non-empy.
-    fn add_one(&mut self, tok: quoted::TokenTree) {
+    fn add_one(&mut self, tok: mbe::TokenTree) {
         if !self.tokens.contains(&tok) {
             self.tokens.push(tok);
         }
@@ -727,7 +731,7 @@ impl TokenSet {
     }
 
     // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: quoted::TokenTree) {
+    fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
         if !self.tokens.contains(&tok) {
             self.tokens.push(tok);
         }
@@ -768,10 +772,10 @@ fn check_matcher_core(
     features: &Features,
     attrs: &[ast::Attribute],
     first_sets: &FirstSets,
-    matcher: &[quoted::TokenTree],
+    matcher: &[mbe::TokenTree],
     follow: &TokenSet,
 ) -> TokenSet {
-    use quoted::TokenTree;
+    use mbe::TokenTree;
 
     let mut last = TokenSet::empty();
 
@@ -946,8 +950,8 @@ fn check_matcher_core(
     last
 }
 
-fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
-    if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
+fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
+    if let mbe::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
         frag_can_be_followed_by_any(frag_spec.name)
     } else {
         // (Non NT's can always be followed by anthing in matchers.)
@@ -993,8 +997,8 @@ enum IsInFollow {
 /// break macros that were relying on that binary operator as a
 /// separator.
 // when changing this do not forget to update doc/book/macros.md!
-fn is_in_follow(tok: &quoted::TokenTree, frag: Symbol) -> IsInFollow {
-    use quoted::TokenTree;
+fn is_in_follow(tok: &mbe::TokenTree, frag: Symbol) -> IsInFollow {
+    use mbe::TokenTree;
 
     if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
         // closing a token tree can never be matched by any fragment;
@@ -1112,10 +1116,10 @@ fn has_legal_fragment_specifier(
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
-    tok: &quoted::TokenTree,
+    tok: &mbe::TokenTree,
 ) -> Result<(), String> {
     debug!("has_legal_fragment_specifier({:?})", tok);
-    if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
+    if let mbe::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
         let frag_span = tok.span();
         if !is_legal_fragment_specifier(sess, features, attrs, frag_spec.name, frag_span) {
             return Err(frag_spec.to_string());
@@ -1156,14 +1160,27 @@ fn is_legal_fragment_specifier(
     }
 }
 
-fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
+fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
-        quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
-        quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
+        mbe::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
+        mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
+        mbe::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!(
-            "unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+            "unexpected mbe::TokenTree::{{Sequence or Delimited}} \
              in follow set checker"
         ),
     }
 }
+
+impl TokenTree {
+    /// Use this token tree as a matcher to parse given tts.
+    fn parse(cx: &ExtCtxt<'_>, mtch: &[mbe::TokenTree], tts: TokenStream)
+             -> NamedParseResult {
+        // `None` is because we're not interpolating
+        let directory = Directory {
+            path: Cow::from(cx.current_expansion.module.directory.as_path()),
+            ownership: cx.current_expansion.directory_ownership,
+        };
+        parse(cx.parse_sess(), tts, mtch, Some(directory), true)
+    }
+}
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/mbe/quoted.rs
index cad94a0e4c1..3952e29a5f0 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/mbe/quoted.rs
@@ -1,179 +1,19 @@
 use crate::ast;
 use crate::ast::NodeId;
-use crate::ext::tt::macro_parser;
+use crate::ext::mbe::macro_parser;
+use crate::ext::mbe::{TokenTree, KleeneOp, KleeneToken, SequenceRepetition, Delimited};
 use crate::feature_gate::Features;
-use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::token::{self, Token};
 use crate::parse::ParseSess;
 use crate::print::pprust;
 use crate::symbol::kw;
-use crate::tokenstream::{self, DelimSpan};
+use crate::tokenstream;
 
-use syntax_pos::{edition::Edition, BytePos, Span};
+use syntax_pos::{edition::Edition, Span};
 
 use rustc_data_structures::sync::Lrc;
 use std::iter::Peekable;
 
-/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
-/// that the delimiter itself might be `NoDelim`.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-pub struct Delimited {
-    pub delim: token::DelimToken,
-    pub tts: Vec<TokenTree>,
-}
-
-impl Delimited {
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
-    pub fn open_tt(&self, span: Span) -> TokenTree {
-        let open_span = if span.is_dummy() {
-            span
-        } else {
-            span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
-        };
-        TokenTree::token(token::OpenDelim(self.delim), open_span)
-    }
-
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
-    pub fn close_tt(&self, span: Span) -> TokenTree {
-        let close_span = if span.is_dummy() {
-            span
-        } else {
-            span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
-        };
-        TokenTree::token(token::CloseDelim(self.delim), close_span)
-    }
-}
-
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-pub struct SequenceRepetition {
-    /// The sequence of token trees
-    pub tts: Vec<TokenTree>,
-    /// The optional separator
-    pub separator: Option<Token>,
-    /// Whether the sequence can be repeated zero (*), or one or more times (+)
-    pub kleene: KleeneToken,
-    /// The number of `Match`s that appear in the sequence (and subsequences)
-    pub num_captures: usize,
-}
-
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
-pub struct KleeneToken {
-    pub span: Span,
-    pub op: KleeneOp,
-}
-
-impl KleeneToken {
-    pub fn new(op: KleeneOp, span: Span) -> KleeneToken {
-        KleeneToken { span, op }
-    }
-}
-
-/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
-/// for token sequences.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum KleeneOp {
-    /// Kleene star (`*`) for zero or more repetitions
-    ZeroOrMore,
-    /// Kleene plus (`+`) for one or more repetitions
-    OneOrMore,
-    /// Kleene optional (`?`) for zero or one reptitions
-    ZeroOrOne,
-}
-
-/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
-/// are "first-class" token trees. Useful for parsing macros.
-#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
-pub enum TokenTree {
-    Token(Token),
-    Delimited(DelimSpan, Lrc<Delimited>),
-    /// A kleene-style repetition sequence
-    Sequence(DelimSpan, Lrc<SequenceRepetition>),
-    /// e.g., `$var`
-    MetaVar(Span, ast::Ident),
-    /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
-    MetaVarDecl(
-        Span,
-        ast::Ident, /* name to bind */
-        ast::Ident, /* kind of nonterminal */
-    ),
-}
-
-impl TokenTree {
-    /// Return the number of tokens in the tree.
-    pub fn len(&self) -> usize {
-        match *self {
-            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
-                token::NoDelim => delimed.tts.len(),
-                _ => delimed.tts.len() + 2,
-            },
-            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
-            _ => 0,
-        }
-    }
-
-    /// Returns `true` if the given token tree contains no other tokens. This is vacuously true for
-    /// single tokens or metavar/decls, but may be false for delimited trees or sequences.
-    pub fn is_empty(&self) -> bool {
-        match *self {
-            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
-                token::NoDelim => delimed.tts.is_empty(),
-                _ => false,
-            },
-            TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
-            _ => true,
-        }
-    }
-
-    /// Returns `true` if the given token tree is delimited.
-    pub fn is_delimited(&self) -> bool {
-        match *self {
-            TokenTree::Delimited(..) => true,
-            _ => false,
-        }
-    }
-
-    /// Returns `true` if the given token tree is a token of the given kind.
-    pub fn is_token(&self, expected_kind: &TokenKind) -> bool {
-        match self {
-            TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
-            _ => false,
-        }
-    }
-
-    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
-    pub fn get_tt(&self, index: usize) -> TokenTree {
-        match (self, index) {
-            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
-                delimed.tts[index].clone()
-            }
-            (&TokenTree::Delimited(span, ref delimed), _) => {
-                if index == 0 {
-                    return delimed.open_tt(span.open);
-                }
-                if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt(span.close);
-                }
-                delimed.tts[index - 1].clone()
-            }
-            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
-            _ => panic!("Cannot expand a token tree"),
-        }
-    }
-
-    /// Retrieves the `TokenTree`'s span.
-    pub fn span(&self) -> Span {
-        match *self {
-            TokenTree::Token(Token { span, .. })
-            | TokenTree::MetaVar(span, _)
-            | TokenTree::MetaVarDecl(span, _, _) => span,
-            TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
-        }
-    }
-
-    crate fn token(kind: TokenKind, span: Span) -> TokenTree {
-        TokenTree::Token(Token::new(kind, span))
-    }
-}
-
 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
 /// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
 /// collection of `TokenTree` for use in parsing a macro.
@@ -195,7 +35,7 @@ impl TokenTree {
 /// # Returns
 ///
 /// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
-pub fn parse(
+pub(super) fn parse(
     input: tokenstream::TokenStream,
     expect_matchers: bool,
     sess: &ParseSess,
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/mbe/transcribe.rs
index f9c07e3a2e4..ba818ebd35c 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/mbe/transcribe.rs
@@ -1,7 +1,7 @@
 use crate::ast::{Ident, Mac};
 use crate::ext::base::ExtCtxt;
-use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
-use crate::ext::tt::quoted;
+use crate::ext::mbe;
+use crate::ext::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
 use crate::mut_visit::{self, MutVisitor};
 use crate::parse::token::{self, NtTT, Token};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
@@ -38,22 +38,22 @@ impl Marker {
 
 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame {
-    Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
-    Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
+    Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
+    Sequence { forest: Lrc<mbe::SequenceRepetition>, idx: usize, sep: Option<Token> },
 }
 
 impl Frame {
     /// Construct a new frame around the delimited set of tokens.
-    fn new(tts: Vec<quoted::TokenTree>) -> Frame {
-        let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts });
+    fn new(tts: Vec<mbe::TokenTree>) -> Frame {
+        let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
         Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
     }
 }
 
 impl Iterator for Frame {
-    type Item = quoted::TokenTree;
+    type Item = mbe::TokenTree;
 
-    fn next(&mut self) -> Option<quoted::TokenTree> {
+    fn next(&mut self) -> Option<mbe::TokenTree> {
         match *self {
             Frame::Delimited { ref forest, ref mut idx, .. } => {
                 *idx += 1;
@@ -90,7 +90,7 @@ impl Iterator for Frame {
 pub(super) fn transcribe(
     cx: &ExtCtxt<'_>,
     interp: &FxHashMap<Ident, NamedMatch>,
-    src: Vec<quoted::TokenTree>,
+    src: Vec<mbe::TokenTree>,
     transparency: Transparency,
 ) -> TokenStream {
     // Nothing for us to transcribe...
@@ -178,7 +178,7 @@ pub(super) fn transcribe(
             // We are descending into a sequence. We first make sure that the matchers in the RHS
             // and the matches in `interp` have the same shape. Otherwise, either the caller or the
             // macro writer has made a mistake.
-            seq @ quoted::TokenTree::Sequence(..) => {
+            seq @ mbe::TokenTree::Sequence(..) => {
                 match lockstep_iter_size(&seq, interp, &repeats) {
                     LockstepIterSize::Unconstrained => {
                         cx.span_fatal(
@@ -199,7 +199,7 @@ pub(super) fn transcribe(
                     LockstepIterSize::Constraint(len, _) => {
                         // We do this to avoid an extra clone above. We know that this is a
                         // sequence already.
-                        let (sp, seq) = if let quoted::TokenTree::Sequence(sp, seq) = seq {
+                        let (sp, seq) = if let mbe::TokenTree::Sequence(sp, seq) = seq {
                             (sp, seq)
                         } else {
                             unreachable!()
@@ -207,7 +207,7 @@ pub(super) fn transcribe(
 
                         // Is the repetition empty?
                         if len == 0 {
-                            if seq.kleene.op == quoted::KleeneOp::OneOrMore {
+                            if seq.kleene.op == mbe::KleeneOp::OneOrMore {
                                 // FIXME: this really ought to be caught at macro definition
                                 // time... It happens when the Kleene operator in the matcher and
                                 // the body for the same meta-variable do not match.
@@ -232,7 +232,7 @@ pub(super) fn transcribe(
             }
 
             // Replace the meta-var with the matched token tree from the invocation.
-            quoted::TokenTree::MetaVar(mut sp, mut ident) => {
+            mbe::TokenTree::MetaVar(mut sp, mut ident) => {
                 // Find the matched nonterminal from the macro invocation, and use it to replace
                 // the meta-var.
                 if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
@@ -269,7 +269,7 @@ pub(super) fn transcribe(
             // We will produce all of the results of the inside of the `Delimited` and then we will
             // jump back out of the Delimited, pop the result_stack and add the new results back to
             // the previous results (from outside the Delimited).
-            quoted::TokenTree::Delimited(mut span, delimited) => {
+            mbe::TokenTree::Delimited(mut span, delimited) => {
                 marker.visit_delim_span(&mut span);
                 stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
                 result_stack.push(mem::take(&mut result));
@@ -277,14 +277,14 @@ pub(super) fn transcribe(
 
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
-            quoted::TokenTree::Token(token) => {
+            mbe::TokenTree::Token(token) => {
                 let mut tt = TokenTree::Token(token);
                 marker.visit_tt(&mut tt);
                 result.push(tt.into());
             }
 
             // There should be no meta-var declarations in the invocation of a macro.
-            quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
+            mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
         }
     }
 }
@@ -368,11 +368,11 @@ impl LockstepIterSize {
 /// `lookup_cur_matched` will return `None`, which is why this still works even in the presnece of
 /// multiple nested matcher sequences.
 fn lockstep_iter_size(
-    tree: &quoted::TokenTree,
+    tree: &mbe::TokenTree,
     interpolations: &FxHashMap<Ident, NamedMatch>,
     repeats: &[(usize, usize)],
 ) -> LockstepIterSize {
-    use quoted::TokenTree;
+    use mbe::TokenTree;
     match *tree {
         TokenTree::Delimited(_, ref delimed) => {
             delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index b4ae1e87bca..b0833010fe0 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -162,19 +162,14 @@ pub mod ext {
     mod proc_macro_server;
 
     pub use syntax_pos::hygiene;
+    pub use mbe::macro_rules::compile_declarative_macro;
     pub mod allocator;
     pub mod base;
     pub mod build;
     pub mod expand;
     pub mod proc_macro;
 
-    pub mod tt {
-        pub mod transcribe;
-        pub mod macro_check;
-        pub mod macro_parser;
-        pub mod macro_rules;
-        pub mod quoted;
-    }
+    crate mod mbe;
 }
 
 pub mod early_buffered_lints;
diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs
index cf196645e4f..0d073f0cc97 100644
--- a/src/libsyntax/parse/parser/item.rs
+++ b/src/libsyntax/parse/parser/item.rs
@@ -24,7 +24,7 @@ use crate::symbol::{kw, sym};
 use std::mem;
 use log::debug;
 use rustc_target::spec::abi::Abi;
-use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
+use errors::{Applicability, DiagnosticBuilder, DiagnosticId, StashKey};
 
 /// Whether the type alias or associated type is a concrete type or an opaque type.
 #[derive(Debug)]
@@ -1477,10 +1477,23 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Parse `["const" | ("static" "mut"?)] $ident ":" $ty = $expr` with
+    /// `["const" | ("static" "mut"?)]` already parsed and stored in `m`.
+    ///
+    /// When `m` is `"const"`, `$ident` may also be `"_"`.
     fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
         let id = if m.is_none() { self.parse_ident_or_underscore() } else { self.parse_ident() }?;
-        self.expect(&token::Colon)?;
-        let ty = self.parse_ty()?;
+
+        // Parse the type of a `const` or `static mut?` item.
+        // That is, the `":" $ty` fragment.
+        let ty = if self.token == token::Eq {
+            self.recover_missing_const_type(id, m)
+        } else {
+            // Not `=` so expect `":"" $ty` as usual.
+            self.expect(&token::Colon)?;
+            self.parse_ty()?
+        };
+
         self.expect(&token::Eq)?;
         let e = self.parse_expr()?;
         self.expect(&token::Semi)?;
@@ -1491,6 +1504,34 @@ impl<'a> Parser<'a> {
         Ok((id, item, None))
     }
 
+    /// We were supposed to parse `:` but instead, we're already at `=`.
+    /// This means that the type is missing.
+    fn recover_missing_const_type(&mut self, id: Ident, m: Option<Mutability>) -> P<Ty> {
+        // Construct the error and stash it away with the hope
+        // that typeck will later enrich the error with a type.
+        let kind = match m {
+            Some(Mutability::Mutable) => "static mut",
+            Some(Mutability::Immutable) => "static",
+            None => "const",
+        };
+        let mut err = self.struct_span_err(id.span, &format!("missing type for `{}` item", kind));
+        err.span_suggestion(
+            id.span,
+            "provide a type for the item",
+            format!("{}: <type>", id),
+            Applicability::HasPlaceholders,
+        );
+        err.stash(id.span, StashKey::ItemNoType);
+
+        // The user intended that the type be inferred,
+        // so treat this as if the user wrote e.g. `const A: _ = expr;`.
+        P(Ty {
+            node: TyKind::Infer,
+            span: id.span,
+            id: ast::DUMMY_NODE_ID,
+        })
+    }
+
     /// Parses `type Foo = Bar;` or returns `None`
     /// without modifying the parser state.
     fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, Generics)>> {
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index d702038f54e..26cae2a8e7c 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -13,9 +13,6 @@
 //! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
 //! ownership of the original.
 
-use crate::ext::base;
-use crate::ext::tt::{macro_parser, quoted};
-use crate::parse::Directory;
 use crate::parse::token::{self, DelimToken, Token, TokenKind};
 use crate::print::pprust;
 
@@ -26,7 +23,6 @@ use rustc_data_structures::sync::Lrc;
 use rustc_serialize::{Decoder, Decodable, Encoder, Encodable};
 use smallvec::{SmallVec, smallvec};
 
-use std::borrow::Cow;
 use std::{fmt, iter, mem};
 
 #[cfg(test)]
@@ -63,17 +59,6 @@ where
 {}
 
 impl TokenTree {
-    /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt<'_>, mtch: &[quoted::TokenTree], tts: TokenStream)
-                 -> macro_parser::NamedParseResult {
-        // `None` is because we're not interpolating
-        let directory = Directory {
-            path: Cow::from(cx.current_expansion.module.directory.as_path()),
-            ownership: cx.current_expansion.directory_ownership,
-        };
-        macro_parser::parse(cx.parse_sess(), tts, mtch, Some(directory), true)
-    }
-
     /// Checks if this TokenTree is equal to the other, regardless of span information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {