about summary refs log tree commit diff
path: root/src/libsyntax
diff options
context:
space:
mode:
authorMatthias Einwag <matthias.einwag@live.com>2019-02-12 22:46:14 -0800
committerMatthias Einwag <matthias.einwag@live.com>2019-02-12 22:46:14 -0800
commit871338c3aed87cb84f02ebd7fd9b447966d5b05d (patch)
treea2e1315d7d17d0b9f3463686ed2fbf36f3238ec4 /src/libsyntax
parent1ef34a5a39641846e824b6450a705d6031002beb (diff)
parent0f949c2fcc696d0260a99196d5e5400c59a26a54 (diff)
downloadrust-871338c3aed87cb84f02ebd7fd9b447966d5b05d.tar.gz
rust-871338c3aed87cb84f02ebd7fd9b447966d5b05d.zip
Merging master
Diffstat (limited to 'src/libsyntax')
-rw-r--r--src/libsyntax/Cargo.toml1
-rw-r--r--src/libsyntax/ast.rs134
-rw-r--r--src/libsyntax/attr/builtin.rs23
-rw-r--r--src/libsyntax/attr/mod.rs108
-rw-r--r--src/libsyntax/config.rs205
-rw-r--r--src/libsyntax/diagnostics/metadata.rs13
-rw-r--r--src/libsyntax/diagnostics/plugin.rs29
-rw-r--r--src/libsyntax/early_buffered_lints.rs4
-rw-r--r--src/libsyntax/entry.rs4
-rw-r--r--src/libsyntax/ext/base.rs185
-rw-r--r--src/libsyntax/ext/build.rs41
-rw-r--r--src/libsyntax/ext/derive.rs29
-rw-r--r--src/libsyntax/ext/expand.rs373
-rw-r--r--src/libsyntax/ext/placeholders.rs111
-rw-r--r--src/libsyntax/ext/source_util.rs43
-rw-r--r--src/libsyntax/ext/tt/macro_parser.rs49
-rw-r--r--src/libsyntax/ext/tt/macro_rules.rs154
-rw-r--r--src/libsyntax/ext/tt/quoted.rs37
-rw-r--r--src/libsyntax/ext/tt/transcribe.rs27
-rw-r--r--src/libsyntax/feature_gate.rs87
-rw-r--r--src/libsyntax/fold.rs1495
-rw-r--r--src/libsyntax/json.rs18
-rw-r--r--src/libsyntax/lib.rs37
-rw-r--r--src/libsyntax/mut_visit.rs1335
-rw-r--r--src/libsyntax/parse/attr.rs18
-rw-r--r--src/libsyntax/parse/classify.rs2
-rw-r--r--src/libsyntax/parse/lexer/comments.rs35
-rw-r--r--src/libsyntax/parse/lexer/mod.rs58
-rw-r--r--src/libsyntax/parse/lexer/tokentrees.rs36
-rw-r--r--src/libsyntax/parse/lexer/unicode_chars.rs2
-rw-r--r--src/libsyntax/parse/mod.rs159
-rw-r--r--src/libsyntax/parse/parser.rs991
-rw-r--r--src/libsyntax/parse/token.rs90
-rw-r--r--src/libsyntax/print/pp.rs7
-rw-r--r--src/libsyntax/print/pprust.rs59
-rw-r--r--src/libsyntax/ptr.rs8
-rw-r--r--src/libsyntax/show_span.rs8
-rw-r--r--src/libsyntax/source_map.rs26
-rw-r--r--src/libsyntax/std_inject.rs21
-rw-r--r--src/libsyntax/test.rs118
-rw-r--r--src/libsyntax/test_snippet.rs9
-rw-r--r--src/libsyntax/tokenstream.rs42
-rw-r--r--src/libsyntax/util/lev_distance.rs8
-rw-r--r--src/libsyntax/util/map_in_place.rs (renamed from src/libsyntax/util/move_map.rs)29
-rw-r--r--src/libsyntax/util/node_count.rs6
-rw-r--r--src/libsyntax/util/parser.rs24
-rw-r--r--src/libsyntax/util/parser_testing.rs24
-rw-r--r--src/libsyntax/visit.rs15
48 files changed, 3251 insertions, 3086 deletions
diff --git a/src/libsyntax/Cargo.toml b/src/libsyntax/Cargo.toml
index fba2623e005..f1e60ba78b7 100644
--- a/src/libsyntax/Cargo.toml
+++ b/src/libsyntax/Cargo.toml
@@ -2,6 +2,7 @@
 authors = ["The Rust Project Developers"]
 name = "syntax"
 version = "0.0.0"
+edition = "2018"
 
 [lib]
 name = "syntax"
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index af521848e90..ab62dd2bc9b 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -1,22 +1,23 @@
 // The Rust abstract syntax tree.
 
-pub use self::GenericArgs::*;
-pub use self::UnsafeSource::*;
-pub use symbol::{Ident, Symbol as Name};
-pub use util::parser::ExprPrecedence;
-
-use ext::hygiene::{Mark, SyntaxContext};
-use print::pprust;
-use ptr::P;
+pub use GenericArgs::*;
+pub use UnsafeSource::*;
+pub use crate::symbol::{Ident, Symbol as Name};
+pub use crate::util::parser::ExprPrecedence;
+
+use crate::ext::hygiene::{Mark, SyntaxContext};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::source_map::{dummy_spanned, respan, Spanned};
+use crate::symbol::{keywords, Symbol};
+use crate::tokenstream::TokenStream;
+use crate::ThinVec;
+
 use rustc_data_structures::indexed_vec::Idx;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert;
 use rustc_target::spec::abi::Abi;
-use source_map::{dummy_spanned, respan, Spanned};
-use symbol::{keywords, Symbol};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::TokenStream;
-use ThinVec;
 
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::sync::Lrc;
@@ -31,7 +32,7 @@ pub struct Label {
 }
 
 impl fmt::Debug for Label {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "label({:?})", self.ident)
     }
 }
@@ -43,7 +44,7 @@ pub struct Lifetime {
 }
 
 impl fmt::Debug for Lifetime {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
             "lifetime({}: {})",
@@ -74,13 +75,13 @@ impl<'a> PartialEq<&'a str> for Path {
 }
 
 impl fmt::Debug for Path {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "path({})", pprust::path_to_string(self))
     }
 }
 
 impl fmt::Display for Path {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", pprust::path_to_string(self))
     }
 }
@@ -128,14 +129,14 @@ impl PathSegment {
     }
 }
 
-/// Arguments of a path segment.
+/// The arguments of a path segment.
 ///
 /// E.g., `<A, B>` as in `Foo<A, B>` or `(A, B)` as in `Foo(A, B)`.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum GenericArgs {
-    /// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
+    /// The `<'a, A, B, C>` in `foo::bar::baz::<'a, A, B, C>`.
     AngleBracketed(AngleBracketedArgs),
-    /// The `(A,B)` and `C` in `Foo(A,B) -> C`
+    /// The `(A, B)` and `C` in `Foo(A, B) -> C`.
     Parenthesized(ParenthesizedArgs),
 }
 
@@ -166,18 +167,28 @@ impl GenericArgs {
 pub enum GenericArg {
     Lifetime(Lifetime),
     Type(P<Ty>),
+    Const(AnonConst),
 }
 
-/// A path like `Foo<'a, T>`
+impl GenericArg {
+    pub fn span(&self) -> Span {
+        match self {
+            GenericArg::Lifetime(lt) => lt.ident.span,
+            GenericArg::Type(ty) => ty.span,
+            GenericArg::Const(ct) => ct.value.span,
+        }
+    }
+}
+
+/// A path like `Foo<'a, T>`.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Default)]
 pub struct AngleBracketedArgs {
-    /// Overall span
+    /// The overall span.
     pub span: Span,
     /// The arguments for this path segment.
     pub args: Vec<GenericArg>,
     /// Bindings (equality constraints) on associated types, if present.
-    ///
-    /// E.g., `Foo<A=Bar>`.
+    /// E.g., `Foo<A = Bar>`.
     pub bindings: Vec<TypeBinding>,
 }
 
@@ -193,7 +204,7 @@ impl Into<Option<P<GenericArgs>>> for ParenthesizedArgs {
     }
 }
 
-/// A path like `Foo(A,B) -> C`
+/// A path like `Foo(A, B) -> C`.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct ParenthesizedArgs {
     /// Overall span
@@ -219,6 +230,7 @@ impl ParenthesizedArgs {
 // hack to ensure that we don't try to access the private parts of `NodeId` in this module
 mod node_id_inner {
     use rustc_data_structures::indexed_vec::Idx;
+    use rustc_data_structures::newtype_index;
     newtype_index! {
         pub struct NodeId {
             ENCODABLE = custom
@@ -227,7 +239,7 @@ mod node_id_inner {
     }
 }
 
-pub use self::node_id_inner::NodeId;
+pub use node_id_inner::NodeId;
 
 impl NodeId {
     pub fn placeholder_from_mark(mark: Mark) -> Self {
@@ -240,7 +252,7 @@ impl NodeId {
 }
 
 impl fmt::Display for NodeId {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(&self.as_u32(), f)
     }
 }
@@ -257,7 +269,7 @@ impl serialize::UseSpecializedDecodable for NodeId {
     }
 }
 
-/// Node id used to represent the root of the crate.
+/// `NodeId` used to represent the root of the crate.
 pub const CRATE_NODE_ID: NodeId = NodeId::from_u32_const(0);
 
 /// When parsing and doing expansions, we initially give all AST nodes this AST
@@ -294,13 +306,32 @@ impl GenericBound {
 
 pub type GenericBounds = Vec<GenericBound>;
 
+/// Specifies the enforced ordering for generic parameters. In the future,
+/// if we wanted to relax this order, we could override `PartialEq` and
+/// `PartialOrd`, to allow the kinds to be unordered.
+#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
+pub enum ParamKindOrd {
+    Lifetime,
+    Type,
+    Const,
+}
+
+impl fmt::Display for ParamKindOrd {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            ParamKindOrd::Lifetime => "lifetime".fmt(f),
+            ParamKindOrd::Type => "type".fmt(f),
+            ParamKindOrd::Const => "const".fmt(f),
+        }
+    }
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum GenericParamKind {
     /// A lifetime definition (e.g., `'a: 'b + 'c + 'd`).
     Lifetime,
-    Type {
-        default: Option<P<Ty>>,
-    },
+    Type { default: Option<P<Ty>> },
+    Const { ty: P<Ty> },
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
@@ -337,7 +368,7 @@ impl Default for Generics {
     }
 }
 
-/// A `where` clause in a definition
+/// A where-clause in a definition.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct WhereClause {
     pub id: NodeId,
@@ -345,7 +376,7 @@ pub struct WhereClause {
     pub span: Span,
 }
 
-/// A single predicate in a `where` clause
+/// A single predicate in a where-clause.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum WherePredicate {
     /// A type binding (e.g., `for<'c> Foo: Send + Clone + 'c`).
@@ -478,7 +509,7 @@ pub struct Pat {
 }
 
 impl fmt::Debug for Pat {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "pat({}: {})", self.id, pprust::pat_to_string(self))
     }
 }
@@ -676,7 +707,7 @@ pub enum BinOpKind {
 
 impl BinOpKind {
     pub fn to_string(&self) -> &'static str {
-        use self::BinOpKind::*;
+        use BinOpKind::*;
         match *self {
             Add => "+",
             Sub => "-",
@@ -713,7 +744,7 @@ impl BinOpKind {
     }
 
     pub fn is_comparison(&self) -> bool {
-        use self::BinOpKind::*;
+        use BinOpKind::*;
         match *self {
             Eq | Lt | Le | Ne | Gt | Ge => true,
             And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false,
@@ -792,7 +823,7 @@ impl Stmt {
 }
 
 impl fmt::Debug for Stmt {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
             "stmt({}: {})",
@@ -1030,7 +1061,7 @@ impl Expr {
 }
 
 impl fmt::Debug for Expr {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "expr({}: {})", self.id, pprust::expr_to_string(self))
     }
 }
@@ -1438,13 +1469,13 @@ pub enum IntTy {
 }
 
 impl fmt::Debug for IntTy {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(self, f)
     }
 }
 
 impl fmt::Display for IntTy {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.ty_to_string())
     }
 }
@@ -1519,13 +1550,13 @@ impl UintTy {
 }
 
 impl fmt::Debug for UintTy {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(self, f)
     }
 }
 
 impl fmt::Display for UintTy {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.ty_to_string())
     }
 }
@@ -1547,7 +1578,7 @@ pub struct Ty {
 }
 
 impl fmt::Debug for Ty {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "type({})", pprust::ty_to_string(self))
     }
 }
@@ -1560,7 +1591,7 @@ pub struct BareFnTy {
     pub decl: P<FnDecl>,
 }
 
-/// The different kinds of types recognized by the compiler.
+/// The various kinds of type recognized by the compiler.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum TyKind {
     /// A variable-length slice (`[T]`).
@@ -1832,7 +1863,7 @@ pub enum Defaultness {
 }
 
 impl fmt::Display for Unsafety {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(
             match *self {
                 Unsafety::Normal => "normal",
@@ -1852,7 +1883,7 @@ pub enum ImplPolarity {
 }
 
 impl fmt::Debug for ImplPolarity {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             ImplPolarity::Positive => "positive".fmt(f),
             ImplPolarity::Negative => "negative".fmt(f),
@@ -1862,7 +1893,7 @@ impl fmt::Debug for ImplPolarity {
 
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum FunctionRetTy {
-    /// Return type is not specified.
+    /// Returns type is not specified.
     ///
     /// Functions default to `()` and closures default to inference.
     /// Span points to where return type would be inserted.
@@ -2004,10 +2035,10 @@ pub struct Attribute {
 
 /// `TraitRef`s appear in impls.
 ///
-/// Resolve maps each `TraitRef`'s `ref_id` to its defining trait; that's all
+/// Resolution maps each `TraitRef`'s `ref_id` to its defining trait; that's all
 /// that the `ref_id` is for. The `impl_id` maps to the "self type" of this impl.
 /// If this impl is an `ItemKind::Impl`, the `impl_id` is redundant (it could be the
-/// same as the impl's node-id).
+/// same as the impl's `NodeId`).
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct TraitRef {
     pub path: Path,
@@ -2164,6 +2195,13 @@ pub struct Item {
     pub tokens: Option<TokenStream>,
 }
 
+impl Item {
+    /// Return the span that encompasses the attributes.
+    pub fn span_with_attributes(&self) -> Span {
+        self.attrs.iter().fold(self.span, |acc, attr| acc.to(attr.span()))
+    }
+}
+
 /// A function header.
 ///
 /// All the information between the visibility and the name of the function is
diff --git a/src/libsyntax/attr/builtin.rs b/src/libsyntax/attr/builtin.rs
index 7fe6f4a2316..520984b8091 100644
--- a/src/libsyntax/attr/builtin.rs
+++ b/src/libsyntax/attr/builtin.rs
@@ -1,9 +1,10 @@
 //! Parsing and validation of builtin attributes
 
-use ast::{self, Attribute, MetaItem, Name, NestedMetaItemKind};
-use errors::{Applicability, Handler};
-use feature_gate::{Features, GatedCfg};
-use parse::ParseSess;
+use crate::ast::{self, Attribute, MetaItem, Name, NestedMetaItemKind};
+use crate::errors::{Applicability, Handler};
+use crate::feature_gate::{Features, GatedCfg};
+use crate::parse::ParseSess;
+
 use syntax_pos::{symbol::Symbol, Span};
 
 use super::{list_contains_name, mark_used, MetaItemKind};
@@ -162,7 +163,7 @@ pub struct RustcDeprecation {
     pub suggestion: Option<Symbol>,
 }
 
-/// Check if `attrs` contains an attribute like `#![feature(feature_name)]`.
+/// Checks if `attrs` contains an attribute like `#![feature(feature_name)]`.
 /// This will not perform any "sanity checks" on the form of the attributes.
 pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool {
     attrs.iter().any(|item| {
@@ -176,7 +177,7 @@ pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool {
     })
 }
 
-/// Find the first stability attribute. `None` if none exists.
+/// Finds the first stability attribute. `None` if none exists.
 pub fn find_stability(sess: &ParseSess, attrs: &[Attribute],
                       item_sp: Span) -> Option<Stability> {
     find_stability_generic(sess, attrs.iter(), item_sp)
@@ -188,7 +189,7 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
                                  -> Option<Stability>
     where I: Iterator<Item = &'a Attribute>
 {
-    use self::StabilityLevel::*;
+    use StabilityLevel::*;
 
     let mut stab: Option<Stability> = None;
     let mut rustc_depr: Option<RustcDeprecation> = None;
@@ -579,7 +580,7 @@ pub struct Deprecation {
     pub note: Option<Symbol>,
 }
 
-/// Find the deprecation attribute. `None` if none exists.
+/// Finds the deprecation attribute. `None` if none exists.
 pub fn find_deprecation(sess: &ParseSess, attrs: &[Attribute],
                         item_sp: Span) -> Option<Deprecation> {
     find_deprecation_generic(sess, attrs.iter(), item_sp)
@@ -694,7 +695,7 @@ pub enum IntType {
 impl IntType {
     #[inline]
     pub fn is_signed(self) -> bool {
-        use self::IntType::*;
+        use IntType::*;
 
         match self {
             SignedInt(..) => true,
@@ -711,7 +712,7 @@ impl IntType {
 /// structure layout, `packed` to remove padding, and `transparent` to elegate representation
 /// concerns to the only non-ZST field.
 pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
-    use self::ReprAttr::*;
+    use ReprAttr::*;
 
     let mut acc = Vec::new();
     let diagnostic = &sess.span_diagnostic;
@@ -831,7 +832,7 @@ pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
 }
 
 fn int_type_of_word(s: &str) -> Option<IntType> {
-    use self::IntType::*;
+    use IntType::*;
 
     match s {
         "i8" => Some(SignedInt(ast::IntTy::I8)),
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index 58be7c3e085..29e86e0ecee 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -2,32 +2,36 @@
 
 mod builtin;
 
-pub use self::builtin::{
+pub use builtin::{
     cfg_matches, contains_feature_attr, eval_condition, find_crate_name, find_deprecation,
     find_repr_attrs, find_stability, find_unwind_attr, Deprecation, InlineAttr, OptimizeAttr,
     IntType, ReprAttr, RustcDeprecation, Stability, StabilityLevel, UnwindAttr,
 };
-pub use self::IntType::*;
-pub use self::ReprAttr::*;
-pub use self::StabilityLevel::*;
-
-use ast;
-use ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment};
-use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
-use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam};
-use source_map::{BytePos, Spanned, respan, dummy_spanned};
+pub use IntType::*;
+pub use ReprAttr::*;
+pub use StabilityLevel::*;
+
+use crate::ast;
+use crate::ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment};
+use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
+use crate::ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam};
+use crate::mut_visit::visit_clobber;
+use crate::source_map::{BytePos, Spanned, respan, dummy_spanned};
+use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use crate::parse::parser::Parser;
+use crate::parse::{self, ParseSess, PResult};
+use crate::parse::token::{self, Token};
+use crate::ptr::P;
+use crate::symbol::Symbol;
+use crate::ThinVec;
+use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};
+use crate::GLOBALS;
+
+use log::debug;
 use syntax_pos::{FileName, Span};
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use parse::parser::Parser;
-use parse::{self, ParseSess, PResult};
-use parse::token::{self, Token};
-use ptr::P;
-use symbol::Symbol;
-use ThinVec;
-use tokenstream::{TokenStream, TokenTree, DelimSpan};
-use GLOBALS;
 
 use std::iter;
+use std::ops::DerefMut;
 
 pub fn mark_used(attr: &Attribute) {
     debug!("Marking {:?} as used.", attr);
@@ -81,7 +85,7 @@ impl NestedMetaItem {
         self.span
     }
 
-    /// Returns true if this list item is a MetaItem with a name of `name`.
+    /// Returns `true` if this list item is a MetaItem with a name of `name`.
     pub fn check_name(&self, name: &str) -> bool {
         self.meta_item().map_or(false, |meta_item| meta_item.check_name(name))
     }
@@ -268,7 +272,7 @@ impl MetaItem {
 }
 
 impl Attribute {
-    /// Extract the MetaItem from inside this Attribute.
+    /// Extracts the MetaItem from inside this Attribute.
     pub fn meta(&self) -> Option<MetaItem> {
         let mut tokens = self.tokens.trees().peekable();
         Some(MetaItem {
@@ -324,7 +328,7 @@ impl Attribute {
         })
     }
 
-    /// Convert self to a normal #[doc="foo"] comment, if it is a
+    /// Converts self to a normal #[doc="foo"] comment, if it is a
     /// comment like `///` or `/** */`. (Returns self unchanged for
     /// non-sugared doc attributes.)
     pub fn with_desugared_doc<T, F>(&self, f: F) -> T where
@@ -630,7 +634,7 @@ impl LitKind {
 
         match *self {
             LitKind::Str(string, ast::StrStyle::Cooked) => {
-                let escaped = string.as_str().escape_default();
+                let escaped = string.as_str().escape_default().to_string();
                 Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None)
             }
             LitKind::Str(string, ast::StrStyle::Raw(n)) => {
@@ -695,13 +699,13 @@ impl LitKind {
 
 pub trait HasAttrs: Sized {
     fn attrs(&self) -> &[ast::Attribute];
-    fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self;
+    fn visit_attrs<F: FnOnce(&mut Vec<ast::Attribute>)>(&mut self, f: F);
 }
 
 impl<T: HasAttrs> HasAttrs for Spanned<T> {
     fn attrs(&self) -> &[ast::Attribute] { self.node.attrs() }
-    fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self {
-        respan(self.span, self.node.map_attrs(f))
+    fn visit_attrs<F: FnOnce(&mut Vec<ast::Attribute>)>(&mut self, f: F) {
+        self.node.visit_attrs(f);
     }
 }
 
@@ -709,7 +713,7 @@ impl HasAttrs for Vec<Attribute> {
     fn attrs(&self) -> &[Attribute] {
         self
     }
-    fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
+    fn visit_attrs<F: FnOnce(&mut Vec<Attribute>)>(&mut self, f: F) {
         f(self)
     }
 }
@@ -718,8 +722,12 @@ impl HasAttrs for ThinVec<Attribute> {
     fn attrs(&self) -> &[Attribute] {
         self
     }
-    fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
-        f(self.into()).into()
+    fn visit_attrs<F: FnOnce(&mut Vec<Attribute>)>(&mut self, f: F) {
+        visit_clobber(self, |this| {
+            let mut vec = this.into();
+            f(&mut vec);
+            vec.into()
+        });
     }
 }
 
@@ -727,8 +735,8 @@ impl<T: HasAttrs + 'static> HasAttrs for P<T> {
     fn attrs(&self) -> &[Attribute] {
         (**self).attrs()
     }
-    fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
-        self.map(|t| t.map_attrs(f))
+    fn visit_attrs<F: FnOnce(&mut Vec<Attribute>)>(&mut self, f: F) {
+        (**self).visit_attrs(f);
     }
 }
 
@@ -745,23 +753,27 @@ impl HasAttrs for StmtKind {
         }
     }
 
-    fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
+    fn visit_attrs<F: FnOnce(&mut Vec<Attribute>)>(&mut self, f: F) {
         match self {
-            StmtKind::Local(local) => StmtKind::Local(local.map_attrs(f)),
-            StmtKind::Item(..) => self,
-            StmtKind::Expr(expr) => StmtKind::Expr(expr.map_attrs(f)),
-            StmtKind::Semi(expr) => StmtKind::Semi(expr.map_attrs(f)),
-            StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, style, attrs)| {
-                (mac, style, attrs.map_attrs(f))
-            })),
+            StmtKind::Local(local) => local.visit_attrs(f),
+            StmtKind::Item(..) => {}
+            StmtKind::Expr(expr) => expr.visit_attrs(f),
+            StmtKind::Semi(expr) => expr.visit_attrs(f),
+            StmtKind::Mac(mac) => {
+                let (_mac, _style, attrs) = mac.deref_mut();
+                attrs.visit_attrs(f);
+            }
         }
     }
 }
 
 impl HasAttrs for Stmt {
-    fn attrs(&self) -> &[ast::Attribute] { self.node.attrs() }
-    fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self {
-        Stmt { id: self.id, node: self.node.map_attrs(f), span: self.span }
+    fn attrs(&self) -> &[ast::Attribute] {
+        self.node.attrs()
+    }
+
+    fn visit_attrs<F: FnOnce(&mut Vec<ast::Attribute>)>(&mut self, f: F) {
+        self.node.visit_attrs(f);
     }
 }
 
@@ -770,9 +782,8 @@ impl HasAttrs for GenericParam {
         &self.attrs
     }
 
-    fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(mut self, f: F) -> Self {
-        self.attrs = self.attrs.map_attrs(f);
-        self
+    fn visit_attrs<F: FnOnce(&mut Vec<Attribute>)>(&mut self, f: F) {
+        self.attrs.visit_attrs(f);
     }
 }
 
@@ -783,11 +794,8 @@ macro_rules! derive_has_attrs {
                 &self.attrs
             }
 
-            fn map_attrs<F>(mut self, f: F) -> Self
-                where F: FnOnce(Vec<Attribute>) -> Vec<Attribute>,
-            {
-                self.attrs = self.attrs.map_attrs(f);
-                self
+            fn visit_attrs<F: FnOnce(&mut Vec<Attribute>)>(&mut self, f: F) {
+                self.attrs.visit_attrs(f);
             }
         }
     )* }
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs
index 2930ce079c8..5bab9e4e2c9 100644
--- a/src/libsyntax/config.rs
+++ b/src/libsyntax/config.rs
@@ -1,20 +1,21 @@
-use attr::HasAttrs;
-use feature_gate::{
+use crate::attr::HasAttrs;
+use crate::feature_gate::{
     feature_err,
     EXPLAIN_STMT_ATTR_SYNTAX,
     Features,
     get_features,
     GateIssue,
 };
-use {fold, attr};
-use ast;
-use source_map::Spanned;
-use edition::Edition;
-use parse::{token, ParseSess};
-use smallvec::SmallVec;
-use errors::Applicability;
+use crate::attr;
+use crate::ast;
+use crate::edition::Edition;
+use crate::errors::Applicability;
+use crate::mut_visit::*;
+use crate::parse::{token, ParseSess};
+use crate::ptr::P;
+use crate::util::map_in_place::MapInPlace;
 
-use ptr::P;
+use smallvec::SmallVec;
 
 /// A folder that strips out items that do not belong in the current configuration.
 pub struct StripUnconfigured<'a> {
@@ -64,8 +65,8 @@ macro_rules! configure {
 }
 
 impl<'a> StripUnconfigured<'a> {
-    pub fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {
-        let node = self.process_cfg_attrs(node);
+    pub fn configure<T: HasAttrs>(&mut self, mut node: T) -> Option<T> {
+        self.process_cfg_attrs(&mut node);
         if self.in_cfg(node.attrs()) { Some(node) } else { None }
     }
 
@@ -75,10 +76,10 @@ impl<'a> StripUnconfigured<'a> {
     /// Gives compiler warnigns if any `cfg_attr` does not contain any
     /// attributes and is in the original source code. Gives compiler errors if
     /// the syntax of any `cfg_attr` is incorrect.
-    pub fn process_cfg_attrs<T: HasAttrs>(&mut self, node: T) -> T {
-        node.map_attrs(|attrs| {
-            attrs.into_iter().flat_map(|attr| self.process_cfg_attr(attr)).collect()
-        })
+    pub fn process_cfg_attrs<T: HasAttrs>(&mut self, node: &mut T) {
+        node.visit_attrs(|attrs| {
+            attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr));
+        });
     }
 
     /// Parse and expand a single `cfg_attr` attribute into a list of attributes
@@ -87,7 +88,7 @@ impl<'a> StripUnconfigured<'a> {
     ///
     /// Gives a compiler warning when the `cfg_attr` contains no attributes and
     /// is in the original source file. Gives a compiler error if the syntax of
-    /// the attribute is incorrect
+    /// the attribute is incorrect.
     fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Vec<ast::Attribute> {
         if !attr.check_name("cfg_attr") {
             return vec![attr];
@@ -145,7 +146,7 @@ impl<'a> StripUnconfigured<'a> {
         }
     }
 
-    /// Determine if a node with the given attributes should be included in this configuration.
+    /// Determines if a node with the given attributes should be included in this configuration.
     pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool {
         attrs.iter().all(|attr| {
             if !is_cfg(attr) {
@@ -217,76 +218,47 @@ impl<'a> StripUnconfigured<'a> {
         }
     }
 
-    pub fn configure_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod {
-        ast::ForeignMod {
-            abi: foreign_mod.abi,
-            items: foreign_mod.items.into_iter().filter_map(|item| self.configure(item)).collect(),
-        }
+    pub fn configure_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) {
+        let ast::ForeignMod { abi: _, items } = foreign_mod;
+        items.flat_map_in_place(|item| self.configure(item));
     }
 
-    fn configure_variant_data(&mut self, vdata: ast::VariantData) -> ast::VariantData {
+    fn configure_variant_data(&mut self, vdata: &mut ast::VariantData) {
         match vdata {
-            ast::VariantData::Struct(fields, id) => {
-                let fields = fields.into_iter().filter_map(|field| self.configure(field));
-                ast::VariantData::Struct(fields.collect(), id)
-            }
-            ast::VariantData::Tuple(fields, id) => {
-                let fields = fields.into_iter().filter_map(|field| self.configure(field));
-                ast::VariantData::Tuple(fields.collect(), id)
-            }
-            ast::VariantData::Unit(id) => ast::VariantData::Unit(id)
+            ast::VariantData::Struct(fields, _id) |
+            ast::VariantData::Tuple(fields, _id) =>
+                fields.flat_map_in_place(|field| self.configure(field)),
+            ast::VariantData::Unit(_id) => {}
         }
     }
 
-    pub fn configure_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {
+    pub fn configure_item_kind(&mut self, item: &mut ast::ItemKind) {
         match item {
-            ast::ItemKind::Struct(def, generics) => {
-                ast::ItemKind::Struct(self.configure_variant_data(def), generics)
-            }
-            ast::ItemKind::Union(def, generics) => {
-                ast::ItemKind::Union(self.configure_variant_data(def), generics)
-            }
-            ast::ItemKind::Enum(def, generics) => {
-                let variants = def.variants.into_iter().filter_map(|v| {
-                    self.configure(v).map(|v| {
-                        Spanned {
-                            node: ast::Variant_ {
-                                ident: v.node.ident,
-                                attrs: v.node.attrs,
-                                data: self.configure_variant_data(v.node.data),
-                                disr_expr: v.node.disr_expr,
-                            },
-                            span: v.span
-                        }
-                    })
-                });
-                ast::ItemKind::Enum(ast::EnumDef {
-                    variants: variants.collect(),
-                }, generics)
+            ast::ItemKind::Struct(def, _generics) |
+            ast::ItemKind::Union(def, _generics) => self.configure_variant_data(def),
+            ast::ItemKind::Enum(ast::EnumDef { variants }, _generics) => {
+                variants.flat_map_in_place(|variant| self.configure(variant));
+                for variant in variants {
+                    self.configure_variant_data(&mut variant.node.data);
+                }
             }
-            item => item,
+            _ => {}
         }
     }
 
-    pub fn configure_expr_kind(&mut self, expr_kind: ast::ExprKind) -> ast::ExprKind {
+    pub fn configure_expr_kind(&mut self, expr_kind: &mut ast::ExprKind) {
         match expr_kind {
-            ast::ExprKind::Match(m, arms) => {
-                let arms = arms.into_iter().filter_map(|a| self.configure(a)).collect();
-                ast::ExprKind::Match(m, arms)
+            ast::ExprKind::Match(_m, arms) => {
+                arms.flat_map_in_place(|arm| self.configure(arm));
             }
-            ast::ExprKind::Struct(path, fields, base) => {
-                let fields = fields.into_iter()
-                    .filter_map(|field| {
-                        self.configure(field)
-                    })
-                    .collect();
-                ast::ExprKind::Struct(path, fields, base)
+            ast::ExprKind::Struct(_path, fields, _base) => {
+                fields.flat_map_in_place(|field| self.configure(field));
             }
-            _ => expr_kind,
+            _ => {}
         }
     }
 
-    pub fn configure_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
+    pub fn configure_expr(&mut self, expr: &mut P<ast::Expr>) {
         self.visit_expr_attrs(expr.attrs());
 
         // If an expr is valid to cfg away it will have been removed by the
@@ -294,8 +266,8 @@ impl<'a> StripUnconfigured<'a> {
         // Anything else is always required, and thus has to error out
         // in case of a cfg attr.
         //
-        // N.B., this is intentionally not part of the fold_expr() function
-        //     in order for fold_opt_expr() to be able to avoid this check
+        // N.B., this is intentionally not part of the visit_expr() function
+        //     in order for filter_map_expr() to be able to avoid this check
         if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(a)) {
             let msg = "removing an expression is not supported in this position";
             self.sess.span_diagnostic.span_err(attr.span, msg);
@@ -304,30 +276,14 @@ impl<'a> StripUnconfigured<'a> {
         self.process_cfg_attrs(expr)
     }
 
-    pub fn configure_stmt(&mut self, stmt: ast::Stmt) -> Option<ast::Stmt> {
-        self.configure(stmt)
-    }
-
-    pub fn configure_struct_expr_field(&mut self, field: ast::Field) -> Option<ast::Field> {
-        self.configure(field)
-    }
-
-    pub fn configure_pat(&mut self, pattern: P<ast::Pat>) -> P<ast::Pat> {
-        pattern.map(|mut pattern| {
-            if let ast::PatKind::Struct(path, fields, etc) = pattern.node {
-                let fields = fields.into_iter()
-                    .filter_map(|field| {
-                        self.configure(field)
-                    })
-                    .collect();
-                pattern.node = ast::PatKind::Struct(path, fields, etc);
-            }
-            pattern
-        })
+    pub fn configure_pat(&mut self, pat: &mut P<ast::Pat>) {
+        if let ast::PatKind::Struct(_path, fields, _etc) = &mut pat.node {
+            fields.flat_map_in_place(|field| self.configure(field));
+        }
     }
 
-    // deny #[cfg] on generic parameters until we decide what to do with it.
-    // see issue #51279.
+    /// Denies `#[cfg]` on generic parameters until we decide what to do with it.
+    /// See issue #51279.
     pub fn disallow_cfg_on_generic_param(&mut self, param: &ast::GenericParam) {
         for attr in param.attrs() {
             let offending_attr = if attr.check_name("cfg") {
@@ -343,57 +299,54 @@ impl<'a> StripUnconfigured<'a> {
     }
 }
 
-impl<'a> fold::Folder for StripUnconfigured<'a> {
-    fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod {
-        let foreign_mod = self.configure_foreign_mod(foreign_mod);
-        fold::noop_fold_foreign_mod(foreign_mod, self)
+impl<'a> MutVisitor for StripUnconfigured<'a> {
+    fn visit_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) {
+        self.configure_foreign_mod(foreign_mod);
+        noop_visit_foreign_mod(foreign_mod, self);
     }
 
-    fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {
-        let item = self.configure_item_kind(item);
-        fold::noop_fold_item_kind(item, self)
+    fn visit_item_kind(&mut self, item: &mut ast::ItemKind) {
+        self.configure_item_kind(item);
+        noop_visit_item_kind(item, self);
     }
 
-    fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
-        let mut expr = self.configure_expr(expr).into_inner();
-        expr.node = self.configure_expr_kind(expr.node);
-        P(fold::noop_fold_expr(expr, self))
+    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
+        self.configure_expr(expr);
+        self.configure_expr_kind(&mut expr.node);
+        noop_visit_expr(expr, self);
     }
 
-    fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
-        let mut expr = configure!(self, expr).into_inner();
-        expr.node = self.configure_expr_kind(expr.node);
-        Some(P(fold::noop_fold_expr(expr, self)))
+    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
+        let mut expr = configure!(self, expr);
+        self.configure_expr_kind(&mut expr.node);
+        noop_visit_expr(&mut expr, self);
+        Some(expr)
     }
 
-    fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
-        match self.configure_stmt(stmt) {
-            Some(stmt) => fold::noop_fold_stmt(stmt, self),
-            None => return SmallVec::new(),
-        }
+    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
+        noop_flat_map_stmt(configure!(self, stmt), self)
     }
 
-    fn fold_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
-        fold::noop_fold_item(configure!(self, item), self)
+    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+        noop_flat_map_item(configure!(self, item), self)
     }
 
-    fn fold_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]>
-    {
-        fold::noop_fold_impl_item(configure!(self, item), self)
+    fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> {
+        noop_flat_map_impl_item(configure!(self, item), self)
     }
 
-    fn fold_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
-        fold::noop_fold_trait_item(configure!(self, item), self)
+    fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
+        noop_flat_map_trait_item(configure!(self, item), self)
     }
 
-    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
+    fn visit_mac(&mut self, _mac: &mut ast::Mac) {
         // Don't configure interpolated AST (cf. issue #34171).
         // Interpolated AST will get configured once the surrounding tokens are parsed.
-        mac
     }
 
-    fn fold_pat(&mut self, pattern: P<ast::Pat>) -> P<ast::Pat> {
-        fold::noop_fold_pat(self.configure_pat(pattern), self)
+    fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
+        self.configure_pat(pat);
+        noop_visit_pat(pat, self)
     }
 }
 
diff --git a/src/libsyntax/diagnostics/metadata.rs b/src/libsyntax/diagnostics/metadata.rs
index abde3dca0f6..704135fe1d5 100644
--- a/src/libsyntax/diagnostics/metadata.rs
+++ b/src/libsyntax/diagnostics/metadata.rs
@@ -12,8 +12,9 @@ use std::error::Error;
 use rustc_serialize::json::as_json;
 
 use syntax_pos::{Span, FileName};
-use ext::base::ExtCtxt;
-use diagnostics::plugin::{ErrorMap, ErrorInfo};
+
+use crate::ext::base::ExtCtxt;
+use crate::diagnostics::plugin::{ErrorMap, ErrorInfo};
 
 /// JSON encodable/decodable version of `ErrorInfo`.
 #[derive(PartialEq, RustcDecodable, RustcEncodable)]
@@ -33,8 +34,8 @@ pub struct ErrorLocation {
 }
 
 impl ErrorLocation {
-    /// Create an error location from a span.
-    pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation {
+    /// Creates an error location from a span.
+    pub fn from_span(ecx: &ExtCtxt<'_>, sp: Span) -> ErrorLocation {
         let loc = ecx.source_map().lookup_char_pos_adj(sp.lo());
         ErrorLocation {
             filename: loc.filename,
@@ -43,7 +44,7 @@ impl ErrorLocation {
     }
 }
 
-/// Get the directory where metadata for a given `prefix` should be stored.
+/// Gets the directory where metadata for a given `prefix` should be stored.
 ///
 /// See `output_metadata`.
 pub fn get_metadata_dir(prefix: &str) -> PathBuf {
@@ -62,7 +63,7 @@ fn get_metadata_path(directory: PathBuf, name: &str) -> PathBuf {
 ///
 /// For our current purposes the prefix is the target architecture and the name is a crate name.
 /// If an error occurs steps will be taken to ensure that no file is created.
-pub fn output_metadata(ecx: &ExtCtxt, prefix: &str, name: &str, err_map: &ErrorMap)
+pub fn output_metadata(ecx: &ExtCtxt<'_>, prefix: &str, name: &str, err_map: &ErrorMap)
     -> Result<(), Box<dyn Error>>
 {
     // Create the directory to place the file in.
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index fa6b825f2a2..e79378d93bd 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -1,20 +1,21 @@
 use std::collections::BTreeMap;
 use std::env;
 
-use ast;
-use ast::{Ident, Name};
-use source_map;
+use crate::ast::{self, Ident, Name};
+use crate::source_map;
+use crate::ext::base::{ExtCtxt, MacEager, MacResult};
+use crate::ext::build::AstBuilder;
+use crate::parse::token;
+use crate::ptr::P;
+use crate::symbol::{keywords, Symbol};
+use crate::tokenstream::{TokenTree};
+
+use smallvec::smallvec;
 use syntax_pos::Span;
-use ext::base::{ExtCtxt, MacEager, MacResult};
-use ext::build::AstBuilder;
-use parse::token;
-use ptr::P;
-use symbol::{keywords, Symbol};
-use tokenstream::{TokenTree};
 
-use diagnostics::metadata::output_metadata;
+use crate::diagnostics::metadata::output_metadata;
 
-pub use errors::*;
+pub use crate::errors::*;
 
 // Maximum width of any line in an extended error description (inclusive).
 const MAX_DESCRIPTION_WIDTH: usize = 80;
@@ -28,7 +29,7 @@ pub struct ErrorInfo {
 /// Mapping from error codes to metadata.
 pub type ErrorMap = BTreeMap<Name, ErrorInfo>;
 
-pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
+pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                    span: Span,
                                    token_tree: &[TokenTree])
                                    -> Box<dyn MacResult+'cx> {
@@ -61,7 +62,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
     MacEager::expr(ecx.expr_tuple(span, Vec::new()))
 }
 
-pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
+pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                        span: Span,
                                        token_tree: &[TokenTree])
                                        -> Box<dyn MacResult+'cx> {
@@ -134,7 +135,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
 }
 
 #[allow(deprecated)]
-pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
+pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                           span: Span,
                                           token_tree: &[TokenTree])
                                           -> Box<dyn MacResult+'cx> {
diff --git a/src/libsyntax/early_buffered_lints.rs b/src/libsyntax/early_buffered_lints.rs
index cf9671a14b3..29cb9cd7f30 100644
--- a/src/libsyntax/early_buffered_lints.rs
+++ b/src/libsyntax/early_buffered_lints.rs
@@ -3,7 +3,7 @@
 //! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat
 //! redundant. Later, these types can be converted to types for use by the rest of the compiler.
 
-use syntax::ast::NodeId;
+use crate::syntax::ast::NodeId;
 use syntax_pos::MultiSpan;
 
 /// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be
@@ -12,6 +12,8 @@ pub enum BufferedEarlyLintId {
     /// Usage of `?` as a macro separator is deprecated.
     QuestionMarkMacroSep,
     IllFormedAttributeInput,
+    /// Usage of a duplicate macro matcher binding name.
+    DuplicateMacroMatcherBindingName,
 }
 
 /// Stores buffered lint info which can later be passed to `librustc`.
diff --git a/src/libsyntax/entry.rs b/src/libsyntax/entry.rs
index 72a550a05d5..09e26e29d86 100644
--- a/src/libsyntax/entry.rs
+++ b/src/libsyntax/entry.rs
@@ -1,5 +1,5 @@
-use attr;
-use ast::{Item, ItemKind};
+use crate::attr;
+use crate::ast::{Item, ItemKind};
 
 pub enum EntryPointType {
     None,
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 09e7e57f78c..8491b3d0cad 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -1,28 +1,29 @@
-pub use self::SyntaxExtension::*;
-
-use ast::{self, Attribute, Name, PatKind, MetaItem};
-use attr::HasAttrs;
-use source_map::{SourceMap, Spanned, respan};
+pub use SyntaxExtension::*;
+
+use crate::ast::{self, Attribute, Name, PatKind, MetaItem};
+use crate::attr::HasAttrs;
+use crate::source_map::{SourceMap, Spanned, respan};
+use crate::edition::Edition;
+use crate::errors::{DiagnosticBuilder, DiagnosticId};
+use crate::ext::expand::{self, AstFragment, Invocation};
+use crate::ext::hygiene::{self, Mark, SyntaxContext, Transparency};
+use crate::mut_visit::{self, MutVisitor};
+use crate::parse::{self, parser, DirectoryOwnership};
+use crate::parse::token;
+use crate::ptr::P;
+use crate::symbol::{keywords, Ident, Symbol};
+use crate::ThinVec;
+use crate::tokenstream::{self, TokenStream};
+
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::{Span, MultiSpan, DUMMY_SP};
-use edition::Edition;
-use errors::{DiagnosticBuilder, DiagnosticId};
-use ext::expand::{self, AstFragment, Invocation};
-use ext::hygiene::{self, Mark, SyntaxContext, Transparency};
-use fold::{self, Folder};
-use parse::{self, parser, DirectoryOwnership};
-use parse::token;
-use ptr::P;
-use smallvec::SmallVec;
-use symbol::{keywords, Ident, Symbol};
-use ThinVec;
 
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::{self, Lrc};
 use std::iter;
 use std::path::PathBuf;
 use std::rc::Rc;
-use rustc_data_structures::sync::{self, Lrc};
 use std::default::Default;
-use tokenstream::{self, TokenStream};
 
 
 #[derive(Debug,Clone)]
@@ -47,15 +48,14 @@ impl HasAttrs for Annotatable {
         }
     }
 
-    fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
+    fn visit_attrs<F: FnOnce(&mut Vec<Attribute>)>(&mut self, f: F) {
         match self {
-            Annotatable::Item(item) => Annotatable::Item(item.map_attrs(f)),
-            Annotatable::TraitItem(trait_item) => Annotatable::TraitItem(trait_item.map_attrs(f)),
-            Annotatable::ImplItem(impl_item) => Annotatable::ImplItem(impl_item.map_attrs(f)),
-            Annotatable::ForeignItem(foreign_item) =>
-                Annotatable::ForeignItem(foreign_item.map_attrs(f)),
-            Annotatable::Stmt(stmt) => Annotatable::Stmt(stmt.map_attrs(f)),
-            Annotatable::Expr(expr) => Annotatable::Expr(expr.map_attrs(f)),
+            Annotatable::Item(item) => item.visit_attrs(f),
+            Annotatable::TraitItem(trait_item) => trait_item.visit_attrs(f),
+            Annotatable::ImplItem(impl_item) => impl_item.visit_attrs(f),
+            Annotatable::ForeignItem(foreign_item) => foreign_item.visit_attrs(f),
+            Annotatable::Stmt(stmt) => stmt.visit_attrs(f),
+            Annotatable::Expr(expr) => expr.visit_attrs(f),
         }
     }
 }
@@ -140,7 +140,7 @@ impl Annotatable {
 // A more flexible ItemDecorator.
 pub trait MultiItemDecorator {
     fn expand(&self,
-              ecx: &mut ExtCtxt,
+              ecx: &mut ExtCtxt<'_>,
               sp: Span,
               meta_item: &ast::MetaItem,
               item: &Annotatable,
@@ -148,10 +148,10 @@ pub trait MultiItemDecorator {
 }
 
 impl<F> MultiItemDecorator for F
-    where F : Fn(&mut ExtCtxt, Span, &ast::MetaItem, &Annotatable, &mut dyn FnMut(Annotatable))
+    where F : Fn(&mut ExtCtxt<'_>, Span, &ast::MetaItem, &Annotatable, &mut dyn FnMut(Annotatable))
 {
     fn expand(&self,
-              ecx: &mut ExtCtxt,
+              ecx: &mut ExtCtxt<'_>,
               sp: Span,
               meta_item: &ast::MetaItem,
               item: &Annotatable,
@@ -164,7 +164,7 @@ impl<F> MultiItemDecorator for F
 // FIXME Decorators should follow the same pattern too.
 pub trait MultiItemModifier {
     fn expand(&self,
-              ecx: &mut ExtCtxt,
+              ecx: &mut ExtCtxt<'_>,
               span: Span,
               meta_item: &ast::MetaItem,
               item: Annotatable)
@@ -172,11 +172,11 @@ pub trait MultiItemModifier {
 }
 
 impl<F, T> MultiItemModifier for F
-    where F: Fn(&mut ExtCtxt, Span, &ast::MetaItem, Annotatable) -> T,
+    where F: Fn(&mut ExtCtxt<'_>, Span, &ast::MetaItem, Annotatable) -> T,
           T: Into<Vec<Annotatable>>,
 {
     fn expand(&self,
-              ecx: &mut ExtCtxt,
+              ecx: &mut ExtCtxt<'_>,
               span: Span,
               meta_item: &ast::MetaItem,
               item: Annotatable)
@@ -193,7 +193,7 @@ impl Into<Vec<Annotatable>> for Annotatable {
 
 pub trait ProcMacro {
     fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
+                   ecx: &'cx mut ExtCtxt<'_>,
                    span: Span,
                    ts: TokenStream)
                    -> TokenStream;
@@ -203,7 +203,7 @@ impl<F> ProcMacro for F
     where F: Fn(TokenStream) -> TokenStream
 {
     fn expand<'cx>(&self,
-                   _ecx: &'cx mut ExtCtxt,
+                   _ecx: &'cx mut ExtCtxt<'_>,
                    _span: Span,
                    ts: TokenStream)
                    -> TokenStream {
@@ -214,7 +214,7 @@ impl<F> ProcMacro for F
 
 pub trait AttrProcMacro {
     fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
+                   ecx: &'cx mut ExtCtxt<'_>,
                    span: Span,
                    annotation: TokenStream,
                    annotated: TokenStream)
@@ -225,7 +225,7 @@ impl<F> AttrProcMacro for F
     where F: Fn(TokenStream, TokenStream) -> TokenStream
 {
     fn expand<'cx>(&self,
-                   _ecx: &'cx mut ExtCtxt,
+                   _ecx: &'cx mut ExtCtxt<'_>,
                    _span: Span,
                    annotation: TokenStream,
                    annotated: TokenStream)
@@ -239,7 +239,7 @@ impl<F> AttrProcMacro for F
 pub trait TTMacroExpander {
     fn expand<'cx>(
         &self,
-        ecx: &'cx mut ExtCtxt,
+        ecx: &'cx mut ExtCtxt<'_>,
         span: Span,
         input: TokenStream,
         def_span: Option<Span>,
@@ -247,47 +247,47 @@ pub trait TTMacroExpander {
 }
 
 pub type MacroExpanderFn =
-    for<'cx> fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
+    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
                 -> Box<dyn MacResult+'cx>;
 
 impl<F> TTMacroExpander for F
-    where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
+    where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
     -> Box<dyn MacResult+'cx>
 {
     fn expand<'cx>(
         &self,
-        ecx: &'cx mut ExtCtxt,
+        ecx: &'cx mut ExtCtxt<'_>,
         span: Span,
         input: TokenStream,
         _def_span: Option<Span>,
     ) -> Box<dyn MacResult+'cx> {
         struct AvoidInterpolatedIdents;
 
-        impl Folder for AvoidInterpolatedIdents {
-            fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree {
-                if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
+        impl MutVisitor for AvoidInterpolatedIdents {
+            fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
+                if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
                     if let token::NtIdent(ident, is_raw) = nt.0 {
-                        return tokenstream::TokenTree::Token(ident.span,
-                                                             token::Ident(ident, is_raw));
+                        *tt = tokenstream::TokenTree::Token(ident.span,
+                                                            token::Ident(ident, is_raw));
                     }
                 }
-                fold::noop_fold_tt(tt, self)
+                mut_visit::noop_visit_tt(tt, self)
             }
 
-            fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
-                fold::noop_fold_mac(mac, self)
+            fn visit_mac(&mut self, mac: &mut ast::Mac) {
+                mut_visit::noop_visit_mac(mac, self)
             }
         }
 
         let input: Vec<_> =
-            input.trees().map(|tt| AvoidInterpolatedIdents.fold_tt(tt)).collect();
+            input.trees().map(|mut tt| { AvoidInterpolatedIdents.visit_tt(&mut tt); tt }).collect();
         (*self)(ecx, span, &input)
     }
 }
 
 pub trait IdentMacroExpander {
     fn expand<'cx>(&self,
-                   cx: &'cx mut ExtCtxt,
+                   cx: &'cx mut ExtCtxt<'_>,
                    sp: Span,
                    ident: ast::Ident,
                    token_tree: Vec<tokenstream::TokenTree>)
@@ -295,15 +295,15 @@ pub trait IdentMacroExpander {
 }
 
 pub type IdentMacroExpanderFn =
-    for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<tokenstream::TokenTree>)
+    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, ast::Ident, Vec<tokenstream::TokenTree>)
                 -> Box<dyn MacResult+'cx>;
 
 impl<F> IdentMacroExpander for F
-    where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, ast::Ident,
+    where F : for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, ast::Ident,
                           Vec<tokenstream::TokenTree>) -> Box<dyn MacResult+'cx>
 {
     fn expand<'cx>(&self,
-                   cx: &'cx mut ExtCtxt,
+                   cx: &'cx mut ExtCtxt<'_>,
                    sp: Span,
                    ident: ast::Ident,
                    token_tree: Vec<tokenstream::TokenTree>)
@@ -327,34 +327,34 @@ macro_rules! make_stmts_default {
 /// The result of a macro expansion. The return values of the various
 /// methods are spliced into the AST at the callsite of the macro.
 pub trait MacResult {
-    /// Create an expression.
+    /// Creates an expression.
     fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
         None
     }
-    /// Create zero or more items.
+    /// Creates zero or more items.
     fn make_items(self: Box<Self>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
         None
     }
 
-    /// Create zero or more impl items.
+    /// Creates zero or more impl items.
     fn make_impl_items(self: Box<Self>) -> Option<SmallVec<[ast::ImplItem; 1]>> {
         None
     }
 
-    /// Create zero or more trait items.
+    /// Creates zero or more trait items.
     fn make_trait_items(self: Box<Self>) -> Option<SmallVec<[ast::TraitItem; 1]>> {
         None
     }
 
-    /// Create zero or more items in an `extern {}` block
+    /// Creates zero or more items in an `extern {}` block
     fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> { None }
 
-    /// Create a pattern.
+    /// Creates a pattern.
     fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
         None
     }
 
-    /// Create zero or more statements.
+    /// Creates zero or more statements.
     ///
     /// By default this attempts to create an expression statement,
     /// returning None if that fails.
@@ -461,7 +461,7 @@ pub struct DummyResult {
 }
 
 impl DummyResult {
-    /// Create a default MacResult that can be anything.
+    /// Creates a default MacResult that can be anything.
     ///
     /// Use this as a return value after hitting any errors and
     /// calling `span_err`.
@@ -474,7 +474,7 @@ impl DummyResult {
         Box::new(DummyResult { expr_only: false, is_error: false, span })
     }
 
-    /// Create a default MacResult that can only be an expression.
+    /// Creates a default MacResult that can only be an expression.
     ///
     /// Use this for macros that must expand to an expression, so even
     /// if an error is encountered internally, the user will receive
@@ -568,7 +568,7 @@ impl MacResult for DummyResult {
 }
 
 pub type BuiltinDeriveFn =
-    for<'cx> fn(&'cx mut ExtCtxt, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable));
+    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable));
 
 /// Represents different kinds of macro invocations that can be resolved.
 #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -621,7 +621,8 @@ pub enum SyntaxExtension {
     /// A function-like procedural macro. TokenStream -> TokenStream.
     ProcMacro {
         expander: Box<dyn ProcMacro + sync::Sync + sync::Send>,
-        allow_internal_unstable: bool,
+        /// Whitelist of unstable features that are treated as stable inside this macro
+        allow_internal_unstable: Option<Lrc<[Symbol]>>,
         edition: Edition,
     },
 
@@ -638,8 +639,10 @@ pub enum SyntaxExtension {
         expander: Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
         def_info: Option<(ast::NodeId, Span)>,
         /// Whether the contents of the macro can
-        /// directly use `#[unstable]` things (true == yes).
-        allow_internal_unstable: bool,
+        /// directly use `#[unstable]` things.
+        ///
+        /// Only allows things that require a feature gate in the given whitelist
+        allow_internal_unstable: Option<Lrc<[Symbol]>>,
         /// Whether the contents of the macro can use `unsafe`
         /// without triggering the `unsafe_code` lint.
         allow_internal_unsafe: bool,
@@ -654,8 +657,11 @@ pub enum SyntaxExtension {
 
     /// A function-like syntax extension that has an extra ident before
     /// the block.
-    ///
-    IdentTT(Box<dyn IdentMacroExpander + sync::Sync + sync::Send>, Option<Span>, bool),
+    IdentTT {
+        expander: Box<dyn IdentMacroExpander + sync::Sync + sync::Send>,
+        span: Option<Span>,
+        allow_internal_unstable: Option<Lrc<[Symbol]>>,
+    },
 
     /// An attribute-like procedural macro. TokenStream -> TokenStream.
     /// The input is the annotated item.
@@ -677,12 +683,12 @@ pub enum SyntaxExtension {
 }
 
 impl SyntaxExtension {
-    /// Return which kind of macro calls this syntax extension.
+    /// Returns which kind of macro calls this syntax extension.
     pub fn kind(&self) -> MacroKind {
         match *self {
             SyntaxExtension::DeclMacro { .. } |
             SyntaxExtension::NormalTT { .. } |
-            SyntaxExtension::IdentTT(..) |
+            SyntaxExtension::IdentTT { .. } |
             SyntaxExtension::ProcMacro { .. } =>
                 MacroKind::Bang,
             SyntaxExtension::NonMacroAttr { .. } |
@@ -716,7 +722,7 @@ impl SyntaxExtension {
             SyntaxExtension::ProcMacroDerive(.., edition) => edition,
             // Unstable legacy stuff
             SyntaxExtension::NonMacroAttr { .. } |
-            SyntaxExtension::IdentTT(..) |
+            SyntaxExtension::IdentTT { .. } |
             SyntaxExtension::MultiDecorator(..) |
             SyntaxExtension::MultiModifier(..) |
             SyntaxExtension::BuiltinDerive(..) => hygiene::default_edition(),
@@ -835,8 +841,8 @@ impl<'a> ExtCtxt<'a> {
         expand::MacroExpander::new(self, false)
     }
 
-    /// Returns a `Folder` that deeply expands all macros and assigns all node ids in an AST node.
-    /// Once node ids are assigned, the node may not be expanded, removed, or otherwise modified.
+    /// Returns a `Folder` that deeply expands all macros and assigns all `NodeId`s in an AST node.
+    /// Once `NodeId`s are assigned, the node may not be expanded, removed, or otherwise modified.
     pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
         expand::MacroExpander::new(self, true)
     }
@@ -976,22 +982,19 @@ impl<'a> ExtCtxt<'a> {
     }
 }
 
-/// Extract a string literal from the macro expanded version of `expr`,
+/// Extracts a string literal from the macro expanded version of `expr`,
 /// emitting `err_msg` if `expr` is not a string literal. This does not stop
-/// compilation on error, merely emits a non-fatal error and returns None.
+/// compilation on error, merely emits a non-fatal error and returns `None`.
 pub fn expr_to_spanned_string<'a>(
-    cx: &'a mut ExtCtxt,
-    expr: P<ast::Expr>,
+    cx: &'a mut ExtCtxt<'_>,
+    mut expr: P<ast::Expr>,
     err_msg: &str,
 ) -> Result<Spanned<(Symbol, ast::StrStyle)>, Option<DiagnosticBuilder<'a>>> {
     // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation.
-    let expr = expr.map(|mut expr| {
-        expr.span = expr.span.apply_mark(cx.current_expansion.mark);
-        expr
-    });
+    expr.span = expr.span.apply_mark(cx.current_expansion.mark);
 
     // we want to be able to handle e.g., `concat!("foo", "bar")`
-    let expr = cx.expander().fold_expr(expr);
+    cx.expander().visit_expr(&mut expr);
     Err(match expr.node {
         ast::ExprKind::Lit(ref l) => match l.node {
             ast::LitKind::Str(s, style) => return Ok(respan(expr.span, (s, style))),
@@ -1002,7 +1005,7 @@ pub fn expr_to_spanned_string<'a>(
     })
 }
 
-pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
+pub fn expr_to_string(cx: &mut ExtCtxt<'_>, expr: P<ast::Expr>, err_msg: &str)
                       -> Option<(Symbol, ast::StrStyle)> {
     expr_to_spanned_string(cx, expr, err_msg)
         .map_err(|err| err.map(|mut err| err.emit()))
@@ -1015,7 +1018,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
 /// compilation should call
 /// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be
 /// done as rarely as possible).
-pub fn check_zero_tts(cx: &ExtCtxt,
+pub fn check_zero_tts(cx: &ExtCtxt<'_>,
                       sp: Span,
                       tts: &[tokenstream::TokenTree],
                       name: &str) {
@@ -1025,8 +1028,8 @@ pub fn check_zero_tts(cx: &ExtCtxt,
 }
 
 /// Interpreting `tts` as a comma-separated sequence of expressions,
-/// expect exactly one string literal, or emit an error and return None.
-pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
+/// expect exactly one string literal, or emit an error and return `None`.
+pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
                                sp: Span,
                                tts: &[tokenstream::TokenTree],
                                name: &str)
@@ -1047,15 +1050,17 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
     })
 }
 
-/// Extract comma-separated expressions from `tts`. If there is a
-/// parsing error, emit a non-fatal error and return None.
-pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
+/// Extracts comma-separated expressions from `tts`. If there is a
+/// parsing error, emit a non-fatal error and return `None`.
+pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
                           sp: Span,
                           tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
     let mut p = cx.new_parser_from_tts(tts);
     let mut es = Vec::new();
     while p.token != token::Eof {
-        es.push(cx.expander().fold_expr(panictry!(p.parse_expr())));
+        let mut expr = panictry!(p.parse_expr());
+        cx.expander().visit_expr(&mut expr);
+        es.push(expr);
         if p.eat(&token::Comma) {
             continue;
         }
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index a8eec1a74dd..48f6e4c0c82 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -1,17 +1,18 @@
+use crate::ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind};
+use crate::attr;
+use crate::source_map::{dummy_spanned, respan, Spanned};
+use crate::ext::base::ExtCtxt;
+use crate::ptr::P;
+use crate::symbol::{Symbol, keywords};
+use crate::ThinVec;
+
 use rustc_target::spec::abi::Abi;
-use ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind};
-use attr;
 use syntax_pos::{Pos, Span, DUMMY_SP};
-use source_map::{dummy_spanned, respan, Spanned};
-use ext::base::ExtCtxt;
-use ptr::P;
-use symbol::{Symbol, keywords};
-use ThinVec;
 
 // Transitional re-exports so qquote can find the paths it is looking for
 mod syntax {
-    pub use ext;
-    pub use parse;
+    pub use crate::ext;
+    pub use crate::parse;
 }
 
 pub trait AstBuilder {
@@ -37,12 +38,14 @@ pub trait AstBuilder {
                 bindings: Vec<ast::TypeBinding>)
                 -> (ast::QSelf, ast::Path);
 
-    // types
+    // types and consts
     fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy;
 
     fn ty(&self, span: Span, ty: ast::TyKind) -> P<ast::Ty>;
     fn ty_path(&self, path: ast::Path) -> P<ast::Ty>;
     fn ty_ident(&self, span: Span, idents: ast::Ident) -> P<ast::Ty>;
+    fn anon_const(&self, span: Span, expr: ast::ExprKind) -> ast::AnonConst;
+    fn const_ident(&self, span: Span, idents: ast::Ident) -> ast::AnonConst;
 
     fn ty_rptr(&self, span: Span,
                ty: P<ast::Ty>,
@@ -344,7 +347,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
 
     /// Constructs a qualified path.
     ///
-    /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A=Bar>`.
+    /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A = Bar>`.
     fn qpath_all(&self,
                  self_type: P<ast::Ty>,
                  trait_path: ast::Path,
@@ -393,6 +396,22 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         self.ty_path(self.path_ident(span, ident))
     }
 
+    fn anon_const(&self, span: Span, expr: ast::ExprKind) -> ast::AnonConst {
+        ast::AnonConst {
+            id: ast::DUMMY_NODE_ID,
+            value: P(ast::Expr {
+                id: ast::DUMMY_NODE_ID,
+                node: expr,
+                span,
+                attrs: ThinVec::new(),
+            })
+        }
+    }
+
+    fn const_ident(&self, span: Span, ident: ast::Ident) -> ast::AnonConst {
+        self.anon_const(span, ast::ExprKind::Path(None, self.path_ident(span, ident)))
+    }
+
     fn ty_rptr(&self,
                span: Span,
                ty: P<ast::Ty>,
diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs
index 7ef09ce5fbd..6df369133d0 100644
--- a/src/libsyntax/ext/derive.rs
+++ b/src/libsyntax/ext/derive.rs
@@ -1,15 +1,16 @@
-use attr::HasAttrs;
-use ast;
-use source_map::{hygiene, ExpnInfo, ExpnFormat};
-use ext::base::ExtCtxt;
-use ext::build::AstBuilder;
-use parse::parser::PathStyle;
-use symbol::Symbol;
+use crate::attr::HasAttrs;
+use crate::ast;
+use crate::source_map::{hygiene, ExpnInfo, ExpnFormat};
+use crate::ext::base::ExtCtxt;
+use crate::ext::build::AstBuilder;
+use crate::parse::parser::PathStyle;
+use crate::symbol::Symbol;
+
 use syntax_pos::Span;
 
 use rustc_data_structures::fx::FxHashSet;
 
-pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
+pub fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
     let mut result = Vec::new();
     attrs.retain(|attr| {
         if attr.path != "derive" {
@@ -40,7 +41,7 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec
     result
 }
 
-pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: T) -> T
+pub fn add_derived_markers<T>(cx: &mut ExtCtxt<'_>, span: Span, traits: &[ast::Path], item: &mut T)
     where T: HasAttrs,
 {
     let (mut names, mut pretty_name) = (FxHashSet::default(), "derive(".to_owned());
@@ -57,14 +58,17 @@ pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path]
         call_site: span,
         def_site: None,
         format: ExpnFormat::MacroAttribute(Symbol::intern(&pretty_name)),
-        allow_internal_unstable: true,
+        allow_internal_unstable: Some(vec![
+            Symbol::intern("rustc_attrs"),
+            Symbol::intern("structural_match"),
+        ].into()),
         allow_internal_unsafe: false,
         local_inner_macros: false,
         edition: hygiene::default_edition(),
     });
 
     let span = span.with_ctxt(cx.backtrace());
-    item.map_attrs(|mut attrs| {
+    item.visit_attrs(|attrs| {
         if names.contains(&Symbol::intern("Eq")) && names.contains(&Symbol::intern("PartialEq")) {
             let meta = cx.meta_word(span, Symbol::intern("structural_match"));
             attrs.push(cx.attribute(span, meta));
@@ -73,6 +77,5 @@ pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path]
             let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker"));
             attrs.push(cx.attribute(span, meta));
         }
-        attrs
-    })
+    });
 }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 1b4b44270ad..6c90662d658 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -1,32 +1,34 @@
-use ast::{self, Block, Ident, LitKind, NodeId, PatKind, Path};
-use ast::{MacStmtStyle, StmtKind, ItemKind};
-use attr::{self, HasAttrs};
-use source_map::{ExpnInfo, MacroBang, MacroAttribute, dummy_spanned, respan};
-use config::StripUnconfigured;
-use errors::{Applicability, FatalError};
-use ext::base::*;
-use ext::derive::{add_derived_markers, collect_derives};
-use ext::hygiene::{self, Mark, SyntaxContext};
-use ext::placeholders::{placeholder, PlaceholderExpander};
-use feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
-use fold;
-use fold::*;
-use parse::{DirectoryOwnership, PResult, ParseSess};
-use parse::token::{self, Token};
-use parse::parser::Parser;
-use ptr::P;
-use smallvec::SmallVec;
-use symbol::Symbol;
-use symbol::keywords;
+use crate::ast::{self, Block, Ident, LitKind, NodeId, PatKind, Path};
+use crate::ast::{MacStmtStyle, StmtKind, ItemKind};
+use crate::attr::{self, HasAttrs};
+use crate::source_map::{ExpnInfo, MacroBang, MacroAttribute, dummy_spanned, respan};
+use crate::config::StripUnconfigured;
+use crate::errors::{Applicability, FatalError};
+use crate::ext::base::*;
+use crate::ext::derive::{add_derived_markers, collect_derives};
+use crate::ext::hygiene::{self, Mark, SyntaxContext};
+use crate::ext::placeholders::{placeholder, PlaceholderExpander};
+use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
+use crate::mut_visit::*;
+use crate::parse::{DirectoryOwnership, PResult, ParseSess};
+use crate::parse::token::{self, Token};
+use crate::parse::parser::Parser;
+use crate::ptr::P;
+use crate::symbol::Symbol;
+use crate::symbol::keywords;
+use crate::tokenstream::{TokenStream, TokenTree};
+use crate::visit::{self, Visitor};
+use crate::util::map_in_place::MapInPlace;
+
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::{Span, DUMMY_SP, FileName};
 use syntax_pos::hygiene::ExpnFormat;
-use tokenstream::{TokenStream, TokenTree};
-use visit::{self, Visitor};
 
 use rustc_data_structures::fx::FxHashMap;
 use std::fs;
 use std::io::ErrorKind;
 use std::{iter, mem};
+use std::ops::DerefMut;
 use std::rc::Rc;
 use std::path::PathBuf;
 
@@ -36,8 +38,8 @@ macro_rules! ast_fragments {
             $kind_name:expr;
             // FIXME: HACK: this should be `$(one ...)?` and `$(many ...)?` but `?` macro
             // repetition was removed from 2015 edition in #51587 because of ambiguities.
-            $(one fn $fold_ast:ident; fn $visit_ast:ident;)*
-            $(many fn $fold_ast_elt:ident; fn $visit_ast_elt:ident;)*
+            $(one fn $mut_visit_ast:ident; fn $visit_ast:ident;)*
+            $(many fn $flat_map_ast_elt:ident; fn $visit_ast_elt:ident;)*
             fn $make_ast:ident;
         })*
     ) => {
@@ -87,16 +89,20 @@ macro_rules! ast_fragments {
                 }
             })*
 
-            pub fn fold_with<F: Folder>(self, folder: &mut F) -> Self {
+            pub fn mut_visit_with<F: MutVisitor>(&mut self, vis: &mut F) {
                 match self {
-                    AstFragment::OptExpr(expr) =>
-                        AstFragment::OptExpr(expr.and_then(|expr| folder.fold_opt_expr(expr))),
-                    $($(AstFragment::$Kind(ast) =>
-                        AstFragment::$Kind(folder.$fold_ast(ast)),)*)*
+                    AstFragment::OptExpr(opt_expr) => {
+                        visit_clobber(opt_expr, |opt_expr| {
+                            if let Some(expr) = opt_expr {
+                                vis.filter_map_expr(expr)
+                            } else {
+                                None
+                            }
+                        });
+                    }
+                    $($(AstFragment::$Kind(ast) => vis.$mut_visit_ast(ast),)*)*
                     $($(AstFragment::$Kind(ast) =>
-                        AstFragment::$Kind(ast.into_iter()
-                                              .flat_map(|ast| folder.$fold_ast_elt(ast))
-                                              .collect()),)*)*
+                        ast.flat_map_in_place(|ast| vis.$flat_map_ast_elt(ast)),)*)*
                 }
             }
 
@@ -112,20 +118,20 @@ macro_rules! ast_fragments {
             }
         }
 
-        impl<'a, 'b> Folder for MacroExpander<'a, 'b> {
-            fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
+        impl<'a, 'b> MutVisitor for MacroExpander<'a, 'b> {
+            fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
                 self.expand_fragment(AstFragment::OptExpr(Some(expr))).make_opt_expr()
             }
-            $($(fn $fold_ast(&mut self, ast: $AstTy) -> $AstTy {
-                self.expand_fragment(AstFragment::$Kind(ast)).$make_ast()
+            $($(fn $mut_visit_ast(&mut self, ast: &mut $AstTy) {
+                visit_clobber(ast, |ast| self.expand_fragment(AstFragment::$Kind(ast)).$make_ast());
             })*)*
-            $($(fn $fold_ast_elt(&mut self, ast_elt: <$AstTy as IntoIterator>::Item) -> $AstTy {
+            $($(fn $flat_map_ast_elt(&mut self, ast_elt: <$AstTy as IntoIterator>::Item) -> $AstTy {
                 self.expand_fragment(AstFragment::$Kind(smallvec![ast_elt])).$make_ast()
             })*)*
         }
 
-        impl<'a> MacResult for ::ext::tt::macro_rules::ParserAnyMacro<'a> {
-            $(fn $make_ast(self: Box<::ext::tt::macro_rules::ParserAnyMacro<'a>>)
+        impl<'a> MacResult for crate::ext::tt::macro_rules::ParserAnyMacro<'a> {
+            $(fn $make_ast(self: Box<crate::ext::tt::macro_rules::ParserAnyMacro<'a>>)
                            -> Option<$AstTy> {
                 Some(self.make(AstFragmentKind::$Kind).$make_ast())
             })*
@@ -134,23 +140,23 @@ macro_rules! ast_fragments {
 }
 
 ast_fragments! {
-    Expr(P<ast::Expr>) { "expression"; one fn fold_expr; fn visit_expr; fn make_expr; }
-    Pat(P<ast::Pat>) { "pattern"; one fn fold_pat; fn visit_pat; fn make_pat; }
-    Ty(P<ast::Ty>) { "type"; one fn fold_ty; fn visit_ty; fn make_ty; }
+    Expr(P<ast::Expr>) { "expression"; one fn visit_expr; fn visit_expr; fn make_expr; }
+    Pat(P<ast::Pat>) { "pattern"; one fn visit_pat; fn visit_pat; fn make_pat; }
+    Ty(P<ast::Ty>) { "type"; one fn visit_ty; fn visit_ty; fn make_ty; }
     Stmts(SmallVec<[ast::Stmt; 1]>) {
-        "statement"; many fn fold_stmt; fn visit_stmt; fn make_stmts;
+        "statement"; many fn flat_map_stmt; fn visit_stmt; fn make_stmts;
     }
     Items(SmallVec<[P<ast::Item>; 1]>) {
-        "item"; many fn fold_item; fn visit_item; fn make_items;
+        "item"; many fn flat_map_item; fn visit_item; fn make_items;
     }
     TraitItems(SmallVec<[ast::TraitItem; 1]>) {
-        "trait item"; many fn fold_trait_item; fn visit_trait_item; fn make_trait_items;
+        "trait item"; many fn flat_map_trait_item; fn visit_trait_item; fn make_trait_items;
     }
     ImplItems(SmallVec<[ast::ImplItem; 1]>) {
-        "impl item"; many fn fold_impl_item; fn visit_impl_item; fn make_impl_items;
+        "impl item"; many fn flat_map_impl_item; fn visit_impl_item; fn make_impl_items;
     }
     ForeignItems(SmallVec<[ast::ForeignItem; 1]>) {
-        "foreign item"; many fn fold_foreign_item; fn visit_foreign_item; fn make_foreign_items;
+        "foreign item"; many fn flat_map_foreign_item; fn visit_foreign_item; fn make_foreign_items;
     }
 }
 
@@ -298,7 +304,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         self.cx.current_expansion.depth = 0;
 
         // Collect all macro invocations and replace them with placeholders.
-        let (fragment_with_placeholders, mut invocations)
+        let (mut fragment_with_placeholders, mut invocations)
             = self.collect_invocations(input_fragment, &[]);
 
         // Optimization: if we resolve all imports now,
@@ -370,10 +376,10 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         err.emit();
                     }
 
-                    let item = self.fully_configure(item)
-                        .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs });
-                    let item_with_markers =
-                        add_derived_markers(&mut self.cx, item.span(), &traits, item.clone());
+                    let mut item = self.fully_configure(item);
+                    item.visit_attrs(|attrs| attrs.retain(|a| a.path != "derive"));
+                    let mut item_with_markers = item.clone();
+                    add_derived_markers(&mut self.cx, item.span(), &traits, &mut item_with_markers);
                     let derives = derives.entry(invoc.expansion_data.mark).or_default();
 
                     derives.reserve(traits.len());
@@ -428,7 +434,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                                          expanded_fragment, derives);
             }
         }
-        fragment_with_placeholders.fold_with(&mut placeholder_expander)
+        fragment_with_placeholders.mut_visit_with(&mut placeholder_expander);
+        fragment_with_placeholders
     }
 
     fn resolve_imports(&mut self) {
@@ -437,16 +444,16 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         }
     }
 
-    /// Collect all macro invocations reachable at this time in this AST fragment, and replace
+    /// Collects all macro invocations reachable at this time in this AST fragment, and replace
     /// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s.
     /// Then call into resolver that builds a skeleton ("reduced graph") of the fragment and
     /// prepares data for resolving paths of macro invocations.
-    fn collect_invocations(&mut self, fragment: AstFragment, derives: &[Mark])
+    fn collect_invocations(&mut self, mut fragment: AstFragment, derives: &[Mark])
                            -> (AstFragment, Vec<Invocation>) {
         // Resolve `$crate`s in the fragment for pretty-printing.
         self.cx.resolver.resolve_dollar_crates(&fragment);
 
-        let (fragment_with_placeholders, invocations) = {
+        let invocations = {
             let mut collector = InvocationCollector {
                 cfg: StripUnconfigured {
                     sess: self.cx.parse_sess,
@@ -456,16 +463,16 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 invocations: Vec::new(),
                 monotonic: self.monotonic,
             };
-            (fragment.fold_with(&mut collector), collector.invocations)
+            fragment.mut_visit_with(&mut collector);
+            collector.invocations
         };
 
         if self.monotonic {
             self.cx.resolver.visit_ast_fragment_with_placeholders(
-                self.cx.current_expansion.mark, &fragment_with_placeholders, derives
-            );
+                self.cx.current_expansion.mark, &fragment, derives);
         }
 
-        (fragment_with_placeholders, invocations)
+        (fragment, invocations)
     }
 
     fn fully_configure(&mut self, item: Annotatable) -> Annotatable {
@@ -477,24 +484,25 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         // we know that fold result vector will contain exactly one element
         match item {
             Annotatable::Item(item) => {
-                Annotatable::Item(cfg.fold_item(item).pop().unwrap())
+                Annotatable::Item(cfg.flat_map_item(item).pop().unwrap())
             }
             Annotatable::TraitItem(item) => {
-                Annotatable::TraitItem(item.map(|item| cfg.fold_trait_item(item).pop().unwrap()))
+                Annotatable::TraitItem(
+                    item.map(|item| cfg.flat_map_trait_item(item).pop().unwrap()))
             }
             Annotatable::ImplItem(item) => {
-                Annotatable::ImplItem(item.map(|item| cfg.fold_impl_item(item).pop().unwrap()))
+                Annotatable::ImplItem(item.map(|item| cfg.flat_map_impl_item(item).pop().unwrap()))
             }
             Annotatable::ForeignItem(item) => {
                 Annotatable::ForeignItem(
-                    item.map(|item| cfg.fold_foreign_item(item).pop().unwrap())
+                    item.map(|item| cfg.flat_map_foreign_item(item).pop().unwrap())
                 )
             }
             Annotatable::Stmt(stmt) => {
-                Annotatable::Stmt(stmt.map(|stmt| cfg.fold_stmt(stmt).pop().unwrap()))
+                Annotatable::Stmt(stmt.map(|stmt| cfg.flat_map_stmt(stmt).pop().unwrap()))
             }
-            Annotatable::Expr(expr) => {
-                Annotatable::Expr(cfg.fold_expr(expr))
+            Annotatable::Expr(mut expr) => {
+                Annotatable::Expr({ cfg.visit_expr(&mut expr); expr })
             }
         }
     }
@@ -536,7 +544,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                          invoc: Invocation,
                          ext: &SyntaxExtension)
                          -> Option<AstFragment> {
-        let (attr, item) = match invoc.kind {
+        let (attr, mut item) = match invoc.kind {
             InvocationKind::Attr { attr, item, .. } => (attr?, item),
             _ => unreachable!(),
         };
@@ -550,7 +558,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             call_site: attr.span,
             def_site: None,
             format: MacroAttribute(Symbol::intern(&attr.path.to_string())),
-            allow_internal_unstable: false,
+            allow_internal_unstable: None,
             allow_internal_unsafe: false,
             local_inner_macros: false,
             edition: ext.edition(),
@@ -559,7 +567,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         match *ext {
             NonMacroAttr { .. } => {
                 attr::mark_known(&attr);
-                let item = item.map_attrs(|mut attrs| { attrs.push(attr); attrs });
+                item.visit_attrs(|attrs| attrs.push(attr));
                 Some(invoc.fragment_kind.expect_from_annotatables(iter::once(item)))
             }
             MultiModifier(ref mac) => {
@@ -717,7 +725,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 // don't stability-check macros in the same crate
                 // (the only time this is null is for syntax extensions registered as macros)
                 if def_site_span.map_or(false, |def_span| !crate_span.contains(def_span))
-                    && !span.allows_unstable() && this.cx.ecfg.features.map_or(true, |feats| {
+                    && !span.allows_unstable(&feature.as_str())
+                    && this.cx.ecfg.features.map_or(true, |feats| {
                     // macro features will count as lib features
                     !feats.declared_lib_features.iter().any(|&(feat, _)| feat == feature)
                 }) {
@@ -749,7 +758,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         let opt_expanded = match *ext {
             DeclMacro { ref expander, def_info, edition, .. } => {
                 if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s),
-                                                                    false, false, false, None,
+                                                                    None, false, false, None,
                                                                     edition) {
                     dummy_span
                 } else {
@@ -760,14 +769,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             NormalTT {
                 ref expander,
                 def_info,
-                allow_internal_unstable,
+                ref allow_internal_unstable,
                 allow_internal_unsafe,
                 local_inner_macros,
                 unstable_feature,
                 edition,
             } => {
                 if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s),
-                                                                    allow_internal_unstable,
+                                                                    allow_internal_unstable.clone(),
                                                                     allow_internal_unsafe,
                                                                     local_inner_macros,
                                                                     unstable_feature,
@@ -783,7 +792,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 }
             }
 
-            IdentTT(ref expander, tt_span, allow_internal_unstable) => {
+            IdentTT { ref expander, span: tt_span, ref allow_internal_unstable } => {
                 if ident.name == keywords::Invalid.name() {
                     self.cx.span_err(path.span,
                                     &format!("macro {}! expects an ident argument", path));
@@ -794,7 +803,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         call_site: span,
                         def_site: tt_span,
                         format: macro_bang_format(path),
-                        allow_internal_unstable,
+                        allow_internal_unstable: allow_internal_unstable.clone(),
                         allow_internal_unsafe: false,
                         local_inner_macros: false,
                         edition: hygiene::default_edition(),
@@ -819,7 +828,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 kind.dummy(span)
             }
 
-            SyntaxExtension::ProcMacro { ref expander, allow_internal_unstable, edition } => {
+            SyntaxExtension::ProcMacro { ref expander, ref allow_internal_unstable, edition } => {
                 if ident.name != keywords::Invalid.name() {
                     let msg =
                         format!("macro {}! expects no ident argument, given '{}'", path, ident);
@@ -835,7 +844,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         def_site: None,
                         format: macro_bang_format(path),
                         // FIXME probably want to follow macro_rules macros here.
-                        allow_internal_unstable,
+                        allow_internal_unstable: allow_internal_unstable.clone(),
                         allow_internal_unsafe: false,
                         local_inner_macros: false,
                         edition,
@@ -910,7 +919,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             call_site: span,
             def_site: None,
             format: MacroAttribute(pretty_name),
-            allow_internal_unstable: false,
+            allow_internal_unstable: None,
             allow_internal_unsafe: false,
             local_inner_macros: false,
             edition: ext.edition(),
@@ -929,7 +938,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 Some(invoc.fragment_kind.expect_from_annotatables(items))
             }
             BuiltinDerive(func) => {
-                expn_info.allow_internal_unstable = true;
+                expn_info.allow_internal_unstable = Some(vec![
+                    Symbol::intern("rustc_attrs"),
+                    Symbol::intern("derive_clone_copy"),
+                    Symbol::intern("derive_eq"),
+                    Symbol::intern("libstd_sys_internals"), // RustcDeserialize and RustcSerialize
+                ].into());
                 invoc.expansion_data.mark.set_expn_info(expn_info);
                 let span = span.with_ctxt(self.cx.backtrace());
                 let mut items = Vec::new();
@@ -1114,34 +1128,32 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
     }
 
     /// If `item` is an attr invocation, remove and return the macro attribute and derive traits.
-    fn classify_item<T>(&mut self, mut item: T)
-                        -> (Option<ast::Attribute>, Vec<Path>, T, /* after_derive */ bool)
+    fn classify_item<T>(&mut self, item: &mut T)
+                        -> (Option<ast::Attribute>, Vec<Path>, /* after_derive */ bool)
         where T: HasAttrs,
     {
         let (mut attr, mut traits, mut after_derive) = (None, Vec::new(), false);
 
-        item = item.map_attrs(|mut attrs| {
+        item.visit_attrs(|mut attrs| {
             attr = self.find_attr_invoc(&mut attrs, &mut after_derive);
             traits = collect_derives(&mut self.cx, &mut attrs);
-            attrs
         });
 
-        (attr, traits, item, after_derive)
+        (attr, traits, after_derive)
     }
 
-    /// Alternative of `classify_item()` that ignores `#[derive]` so invocations fallthrough
+    /// Alternative to `classify_item()` that ignores `#[derive]` so invocations fallthrough
     /// to the unused-attributes lint (making it an error on statements and expressions
     /// is a breaking change)
-    fn classify_nonitem<T: HasAttrs>(&mut self, mut item: T)
-                                     -> (Option<ast::Attribute>, T, /* after_derive */ bool) {
+    fn classify_nonitem<T: HasAttrs>(&mut self, nonitem: &mut T)
+                                     -> (Option<ast::Attribute>, /* after_derive */ bool) {
         let (mut attr, mut after_derive) = (None, false);
 
-        item = item.map_attrs(|mut attrs| {
+        nonitem.visit_attrs(|mut attrs| {
             attr = self.find_attr_invoc(&mut attrs, &mut after_derive);
-            attrs
         });
 
-        (attr, item, after_derive)
+        (attr, after_derive)
     }
 
     fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {
@@ -1174,14 +1186,14 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
     }
 }
 
-impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
-    fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
-        let expr = self.cfg.configure_expr(expr);
-        expr.map(|mut expr| {
-            expr.node = self.cfg.configure_expr_kind(expr.node);
+impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
+    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
+        self.cfg.configure_expr(expr);
+        visit_clobber(expr.deref_mut(), |mut expr| {
+            self.cfg.configure_expr_kind(&mut expr.node);
 
             // ignore derives so they remain unused
-            let (attr, expr, after_derive) = self.classify_nonitem(expr);
+            let (attr, after_derive) = self.classify_nonitem(&mut expr);
 
             if attr.is_some() {
                 // Collect the invoc regardless of whether or not attributes are permitted here
@@ -1190,7 +1202,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
 
                 // AstFragmentKind::Expr requires the macro to emit an expression.
                 return self.collect_attr(attr, vec![], Annotatable::Expr(P(expr)),
-                                         AstFragmentKind::Expr, after_derive)
+                                          AstFragmentKind::Expr, after_derive)
                     .make_expr()
                     .into_inner()
             }
@@ -1201,18 +1213,19 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                     .make_expr()
                     .into_inner()
             } else {
-                noop_fold_expr(expr, self)
+                noop_visit_expr(&mut expr, self);
+                expr
             }
-        })
+        });
     }
 
-    fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
+    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
         let expr = configure!(self, expr);
         expr.filter_map(|mut expr| {
-            expr.node = self.cfg.configure_expr_kind(expr.node);
+            self.cfg.configure_expr_kind(&mut expr.node);
 
             // Ignore derives so they remain unused.
-            let (attr, expr, after_derive) = self.classify_nonitem(expr);
+            let (attr, after_derive) = self.classify_nonitem(&mut expr);
 
             if attr.is_some() {
                 attr.as_ref().map(|a| self.cfg.maybe_emit_expr_attr_err(a));
@@ -1229,47 +1242,45 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                     .make_opt_expr()
                     .map(|expr| expr.into_inner())
             } else {
-                Some(noop_fold_expr(expr, self))
+                Some({ noop_visit_expr(&mut expr, self); expr })
             }
         })
     }
 
-    fn fold_pat(&mut self, pat: P<ast::Pat>) -> P<ast::Pat> {
-        let pat = self.cfg.configure_pat(pat);
+    fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
+        self.cfg.configure_pat(pat);
         match pat.node {
             PatKind::Mac(_) => {}
-            _ => return noop_fold_pat(pat, self),
+            _ => return noop_visit_pat(pat, self),
         }
 
-        pat.and_then(|pat| match pat.node {
-            PatKind::Mac(mac) => self.collect_bang(mac, pat.span, AstFragmentKind::Pat).make_pat(),
-            _ => unreachable!(),
-        })
+        visit_clobber(pat, |mut pat| {
+            match mem::replace(&mut pat.node, PatKind::Wild) {
+                PatKind::Mac(mac) =>
+                    self.collect_bang(mac, pat.span, AstFragmentKind::Pat).make_pat(),
+                _ => unreachable!(),
+            }
+        });
     }
 
-    fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
-        let mut stmt = match self.cfg.configure_stmt(stmt) {
-            Some(stmt) => stmt,
-            None => return SmallVec::new(),
-        };
+    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
+        let mut stmt = configure!(self, stmt);
 
         // we'll expand attributes on expressions separately
         if !stmt.is_expr() {
-            let (attr, derives, stmt_, after_derive) = if stmt.is_item() {
-                self.classify_item(stmt)
+            let (attr, derives, after_derive) = if stmt.is_item() {
+                self.classify_item(&mut stmt)
             } else {
                 // ignore derives on non-item statements so it falls through
                 // to the unused-attributes lint
-                let (attr, stmt, after_derive) = self.classify_nonitem(stmt);
-                (attr, vec![], stmt, after_derive)
+                let (attr, after_derive) = self.classify_nonitem(&mut stmt);
+                (attr, vec![], after_derive)
             };
 
             if attr.is_some() || !derives.is_empty() {
-                return self.collect_attr(attr, derives, Annotatable::Stmt(P(stmt_)),
+                return self.collect_attr(attr, derives, Annotatable::Stmt(P(stmt)),
                                          AstFragmentKind::Stmts, after_derive).make_stmts();
             }
-
-            stmt = stmt_;
         }
 
         if let StmtKind::Mac(mac) = stmt.node {
@@ -1291,24 +1302,23 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
 
         // The placeholder expander gives ids to statements, so we avoid folding the id here.
         let ast::Stmt { id, node, span } = stmt;
-        noop_fold_stmt_kind(node, self).into_iter().map(|node| {
+        noop_flat_map_stmt_kind(node, self).into_iter().map(|node| {
             ast::Stmt { id, node, span }
         }).collect()
 
     }
 
-    fn fold_block(&mut self, block: P<Block>) -> P<Block> {
+    fn visit_block(&mut self, block: &mut P<Block>) {
         let old_directory_ownership = self.cx.current_expansion.directory_ownership;
         self.cx.current_expansion.directory_ownership = DirectoryOwnership::UnownedViaBlock;
-        let result = noop_fold_block(block, self);
+        noop_visit_block(block, self);
         self.cx.current_expansion.directory_ownership = old_directory_ownership;
-        result
     }
 
-    fn fold_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
-        let item = configure!(self, item);
+    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+        let mut item = configure!(self, item);
 
-        let (attr, traits, item, after_derive) = self.classify_item(item);
+        let (attr, traits, after_derive) = self.classify_item(&mut item);
         if attr.is_some() || !traits.is_empty() {
             return self.collect_attr(attr, traits, Annotatable::Item(item),
                                      AstFragmentKind::Items, after_derive).make_items();
@@ -1330,7 +1340,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
             }
             ast::ItemKind::Mod(ast::Mod { inner, .. }) => {
                 if item.ident == keywords::Invalid.ident() {
-                    return noop_fold_item(item, self);
+                    return noop_flat_map_item(item, self);
                 }
 
                 let orig_directory_ownership = self.cx.current_expansion.directory_ownership;
@@ -1370,20 +1380,20 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
 
                 let orig_module =
                     mem::replace(&mut self.cx.current_expansion.module, Rc::new(module));
-                let result = noop_fold_item(item, self);
+                let result = noop_flat_map_item(item, self);
                 self.cx.current_expansion.module = orig_module;
                 self.cx.current_expansion.directory_ownership = orig_directory_ownership;
                 result
             }
 
-            _ => noop_fold_item(item, self),
+            _ => noop_flat_map_item(item, self),
         }
     }
 
-    fn fold_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
-        let item = configure!(self, item);
+    fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
+        let mut item = configure!(self, item);
 
-        let (attr, traits, item, after_derive) = self.classify_item(item);
+        let (attr, traits, after_derive) = self.classify_item(&mut item);
         if attr.is_some() || !traits.is_empty() {
             return self.collect_attr(attr, traits, Annotatable::TraitItem(P(item)),
                                      AstFragmentKind::TraitItems, after_derive).make_trait_items()
@@ -1395,14 +1405,14 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                 self.check_attributes(&attrs);
                 self.collect_bang(mac, span, AstFragmentKind::TraitItems).make_trait_items()
             }
-            _ => fold::noop_fold_trait_item(item, self),
+            _ => noop_flat_map_trait_item(item, self),
         }
     }
 
-    fn fold_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> {
-        let item = configure!(self, item);
+    fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> {
+        let mut item = configure!(self, item);
 
-        let (attr, traits, item, after_derive) = self.classify_item(item);
+        let (attr, traits, after_derive) = self.classify_item(&mut item);
         if attr.is_some() || !traits.is_empty() {
             return self.collect_attr(attr, traits, Annotatable::ImplItem(P(item)),
                                      AstFragmentKind::ImplItems, after_derive).make_impl_items();
@@ -1414,30 +1424,34 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                 self.check_attributes(&attrs);
                 self.collect_bang(mac, span, AstFragmentKind::ImplItems).make_impl_items()
             }
-            _ => fold::noop_fold_impl_item(item, self),
+            _ => noop_flat_map_impl_item(item, self),
         }
     }
 
-    fn fold_ty(&mut self, ty: P<ast::Ty>) -> P<ast::Ty> {
-        let ty = match ty.node {
-            ast::TyKind::Mac(_) => ty.into_inner(),
-            _ => return fold::noop_fold_ty(ty, self),
+    fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
+        match ty.node {
+            ast::TyKind::Mac(_) => {}
+            _ => return noop_visit_ty(ty, self),
         };
 
-        match ty.node {
-            ast::TyKind::Mac(mac) => self.collect_bang(mac, ty.span, AstFragmentKind::Ty).make_ty(),
-            _ => unreachable!(),
-        }
+        visit_clobber(ty, |mut ty| {
+            match mem::replace(&mut ty.node, ast::TyKind::Err) {
+                ast::TyKind::Mac(mac) =>
+                    self.collect_bang(mac, ty.span, AstFragmentKind::Ty).make_ty(),
+                _ => unreachable!(),
+            }
+        });
     }
 
-    fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod {
-        noop_fold_foreign_mod(self.cfg.configure_foreign_mod(foreign_mod), self)
+    fn visit_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) {
+        self.cfg.configure_foreign_mod(foreign_mod);
+        noop_visit_foreign_mod(foreign_mod, self);
     }
 
-    fn fold_foreign_item(&mut self, foreign_item: ast::ForeignItem)
+    fn flat_map_foreign_item(&mut self, mut foreign_item: ast::ForeignItem)
         -> SmallVec<[ast::ForeignItem; 1]>
     {
-        let (attr, traits, foreign_item, after_derive) = self.classify_item(foreign_item);
+        let (attr, traits, after_derive) = self.classify_item(&mut foreign_item);
 
         if attr.is_some() || !traits.is_empty() {
             return self.collect_attr(attr, traits, Annotatable::ForeignItem(P(foreign_item)),
@@ -1451,38 +1465,41 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                 .make_foreign_items();
         }
 
-        noop_fold_foreign_item(foreign_item, self)
+        noop_flat_map_foreign_item(foreign_item, self)
     }
 
-    fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {
+    fn visit_item_kind(&mut self, item: &mut ast::ItemKind) {
         match item {
-            ast::ItemKind::MacroDef(..) => item,
-            _ => noop_fold_item_kind(self.cfg.configure_item_kind(item), self),
+            ast::ItemKind::MacroDef(..) => {}
+            _ => {
+                self.cfg.configure_item_kind(item);
+                noop_visit_item_kind(item, self);
+            }
         }
     }
 
-    fn fold_generic_param(&mut self, param: ast::GenericParam) -> ast::GenericParam {
+    fn visit_generic_param(&mut self, param: &mut ast::GenericParam) {
         self.cfg.disallow_cfg_on_generic_param(&param);
-        noop_fold_generic_param(param, self)
+        noop_visit_generic_param(param, self)
     }
 
-    fn fold_attribute(&mut self, at: ast::Attribute) -> Option<ast::Attribute> {
+    fn visit_attribute(&mut self, at: &mut ast::Attribute) {
         // turn `#[doc(include="filename")]` attributes into `#[doc(include(file="filename",
         // contents="file contents")]` attributes
         if !at.check_name("doc") {
-            return noop_fold_attribute(at, self);
+            return noop_visit_attribute(at, self);
         }
 
         if let Some(list) = at.meta_item_list() {
             if !list.iter().any(|it| it.check_name("include")) {
-                return noop_fold_attribute(at, self);
+                return noop_visit_attribute(at, self);
             }
 
             let mut items = vec![];
 
-            for it in list {
+            for mut it in list {
                 if !it.check_name("include") {
-                    items.push(noop_fold_meta_list_item(it, self));
+                    items.push({ noop_visit_meta_list_item(&mut it, self); it });
                     continue;
                 }
 
@@ -1491,7 +1508,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                     self.check_attribute(&at);
                     if self.cx.parse_sess.span_diagnostic.err_count() > err_count {
                         // avoid loading the file if they haven't enabled the feature
-                        return noop_fold_attribute(at, self);
+                        return noop_visit_attribute(at, self);
                     }
 
                     let filename = self.cx.root_path.join(file.to_string());
@@ -1586,22 +1603,18 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
 
             let meta = attr::mk_list_item(DUMMY_SP, Ident::from_str("doc"), items);
             match at.style {
-                ast::AttrStyle::Inner =>
-                    Some(attr::mk_spanned_attr_inner(at.span, at.id, meta)),
-                ast::AttrStyle::Outer =>
-                    Some(attr::mk_spanned_attr_outer(at.span, at.id, meta)),
+                ast::AttrStyle::Inner => *at = attr::mk_spanned_attr_inner(at.span, at.id, meta),
+                ast::AttrStyle::Outer => *at = attr::mk_spanned_attr_outer(at.span, at.id, meta),
             }
         } else {
-            noop_fold_attribute(at, self)
+            noop_visit_attribute(at, self)
         }
     }
 
-    fn new_id(&mut self, id: ast::NodeId) -> ast::NodeId {
+    fn visit_id(&mut self, id: &mut ast::NodeId) {
         if self.monotonic {
-            assert_eq!(id, ast::DUMMY_NODE_ID);
-            self.cx.resolver.next_node_id()
-        } else {
-            id
+            debug_assert_eq!(*id, ast::DUMMY_NODE_ID);
+            *id = self.cx.resolver.next_node_id()
         }
     }
 }
@@ -1666,12 +1679,12 @@ impl<'feat> ExpansionConfig<'feat> {
 #[derive(Debug)]
 pub struct Marker(pub Mark);
 
-impl Folder for Marker {
-    fn new_span(&mut self, span: Span) -> Span {
-        span.apply_mark(self.0)
+impl MutVisitor for Marker {
+    fn visit_span(&mut self, span: &mut Span) {
+        *span = span.apply_mark(self.0)
     }
 
-    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
-        noop_fold_mac(mac, self)
+    fn visit_mac(&mut self, mac: &mut ast::Mac) {
+        noop_visit_mac(mac, self)
     }
 }
diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs
index 3b0402d910a..3e60dd81a3b 100644
--- a/src/libsyntax/ext/placeholders.rs
+++ b/src/libsyntax/ext/placeholders.rs
@@ -1,15 +1,15 @@
-use ast::{self, NodeId};
-use source_map::{DUMMY_SP, dummy_spanned};
-use ext::base::ExtCtxt;
-use ext::expand::{AstFragment, AstFragmentKind};
-use ext::hygiene::Mark;
-use tokenstream::TokenStream;
-use fold::*;
-use ptr::P;
-use smallvec::SmallVec;
-use symbol::keywords;
-use ThinVec;
-use util::move_map::MoveMap;
+use crate::ast::{self, NodeId};
+use crate::source_map::{DUMMY_SP, dummy_spanned};
+use crate::ext::base::ExtCtxt;
+use crate::ext::expand::{AstFragment, AstFragmentKind};
+use crate::ext::hygiene::Mark;
+use crate::tokenstream::TokenStream;
+use crate::mut_visit::*;
+use crate::ptr::P;
+use crate::symbol::keywords;
+use crate::ThinVec;
+
+use smallvec::{smallvec, SmallVec};
 
 use rustc_data_structures::fx::FxHashMap;
 
@@ -85,8 +85,8 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> {
         }
     }
 
-    pub fn add(&mut self, id: ast::NodeId, fragment: AstFragment, derives: Vec<Mark>) {
-        let mut fragment = fragment.fold_with(self);
+    pub fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment, derives: Vec<Mark>) {
+        fragment.mut_visit_with(self);
         if let AstFragment::Items(mut items) = fragment {
             for derive in derives {
                 match self.remove(NodeId::placeholder_from_mark(derive)) {
@@ -104,56 +104,56 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> {
     }
 }
 
-impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> {
-    fn fold_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> {
+    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
         match item.node {
             ast::ItemKind::Mac(_) => return self.remove(item.id).make_items(),
             ast::ItemKind::MacroDef(_) => return smallvec![item],
             _ => {}
         }
 
-        noop_fold_item(item, self)
+        noop_flat_map_item(item, self)
     }
 
-    fn fold_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
+    fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
         match item.node {
             ast::TraitItemKind::Macro(_) => self.remove(item.id).make_trait_items(),
-            _ => noop_fold_trait_item(item, self),
+            _ => noop_flat_map_trait_item(item, self),
         }
     }
 
-    fn fold_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> {
+    fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> {
         match item.node {
             ast::ImplItemKind::Macro(_) => self.remove(item.id).make_impl_items(),
-            _ => noop_fold_impl_item(item, self),
+            _ => noop_flat_map_impl_item(item, self),
         }
     }
 
-    fn fold_foreign_item(&mut self, item: ast::ForeignItem) -> SmallVec<[ast::ForeignItem; 1]> {
+    fn flat_map_foreign_item(&mut self, item: ast::ForeignItem) -> SmallVec<[ast::ForeignItem; 1]> {
         match item.node {
             ast::ForeignItemKind::Macro(_) => self.remove(item.id).make_foreign_items(),
-            _ => noop_fold_foreign_item(item, self),
+            _ => noop_flat_map_foreign_item(item, self),
         }
     }
 
-    fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
+    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
         match expr.node {
-            ast::ExprKind::Mac(_) => self.remove(expr.id).make_expr(),
-            _ => expr.map(|expr| noop_fold_expr(expr, self)),
+            ast::ExprKind::Mac(_) => *expr = self.remove(expr.id).make_expr(),
+            _ => noop_visit_expr(expr, self),
         }
     }
 
-    fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
+    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
         match expr.node {
             ast::ExprKind::Mac(_) => self.remove(expr.id).make_opt_expr(),
-            _ => noop_fold_opt_expr(expr, self),
+            _ => noop_filter_map_expr(expr, self),
         }
     }
 
-    fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
+    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
         let (style, mut stmts) = match stmt.node {
             ast::StmtKind::Mac(mac) => (mac.1, self.remove(stmt.id).make_stmts()),
-            _ => return noop_fold_stmt(stmt, self),
+            _ => return noop_flat_map_stmt(stmt, self),
         };
 
         if style == ast::MacStmtStyle::Semicolon {
@@ -165,49 +165,40 @@ impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> {
         stmts
     }
 
-    fn fold_pat(&mut self, pat: P<ast::Pat>) -> P<ast::Pat> {
+    fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
         match pat.node {
-            ast::PatKind::Mac(_) => self.remove(pat.id).make_pat(),
-            _ => noop_fold_pat(pat, self),
+            ast::PatKind::Mac(_) => *pat = self.remove(pat.id).make_pat(),
+            _ => noop_visit_pat(pat, self),
         }
     }
 
-    fn fold_ty(&mut self, ty: P<ast::Ty>) -> P<ast::Ty> {
+    fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
         match ty.node {
-            ast::TyKind::Mac(_) => self.remove(ty.id).make_ty(),
-            _ => noop_fold_ty(ty, self),
+            ast::TyKind::Mac(_) => *ty = self.remove(ty.id).make_ty(),
+            _ => noop_visit_ty(ty, self),
         }
     }
 
-    fn fold_block(&mut self, block: P<ast::Block>) -> P<ast::Block> {
-        noop_fold_block(block, self).map(|mut block| {
-            let mut remaining_stmts = block.stmts.len();
+    fn visit_block(&mut self, block: &mut P<ast::Block>) {
+        noop_visit_block(block, self);
 
-            block.stmts = block.stmts.move_flat_map(|mut stmt| {
-                remaining_stmts -= 1;
-
-                if self.monotonic {
-                    assert_eq!(stmt.id, ast::DUMMY_NODE_ID);
-                    stmt.id = self.cx.resolver.next_node_id();
-                }
-
-                Some(stmt)
-            });
-
-            block
-        })
+        for stmt in block.stmts.iter_mut() {
+            if self.monotonic {
+                assert_eq!(stmt.id, ast::DUMMY_NODE_ID);
+                stmt.id = self.cx.resolver.next_node_id();
+            }
+        }
     }
 
-    fn fold_mod(&mut self, module: ast::Mod) -> ast::Mod {
-        let mut module = noop_fold_mod(module, self);
-        module.items = module.items.move_flat_map(|item| match item.node {
-            ast::ItemKind::Mac(_) if !self.cx.ecfg.keep_macs => None, // remove macro definitions
-            _ => Some(item),
+    fn visit_mod(&mut self, module: &mut ast::Mod) {
+        noop_visit_mod(module, self);
+        module.items.retain(|item| match item.node {
+            ast::ItemKind::Mac(_) if !self.cx.ecfg.keep_macs => false, // remove macro definitions
+            _ => true,
         });
-        module
     }
 
-    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
-        mac
+    fn visit_mac(&mut self, _mac: &mut ast::Mac) {
+        // Do nothing.
     }
 }
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index e63042a4208..549de1628eb 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -1,15 +1,14 @@
-use ast;
-use syntax_pos::{self, Pos, Span, FileName};
-use ext::base::*;
-use ext::base;
-use ext::build::AstBuilder;
-use parse::{token, DirectoryOwnership};
-use parse;
-use print::pprust;
-use ptr::P;
+use crate::ast;
+use crate::ext::base::{self, *};
+use crate::ext::build::AstBuilder;
+use crate::parse::{self, token, DirectoryOwnership};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::symbol::Symbol;
+use crate::tokenstream;
+
 use smallvec::SmallVec;
-use symbol::Symbol;
-use tokenstream;
+use syntax_pos::{self, Pos, Span, FileName};
 
 use std::fs;
 use std::io::ErrorKind;
@@ -21,7 +20,7 @@ use rustc_data_structures::sync::Lrc;
 // a given file into the current one.
 
 /// line!(): expands to the current line number
-pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                    -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "line!");
 
@@ -32,7 +31,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 }
 
 /* column!(): expands to the current column number */
-pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                   -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "column!");
 
@@ -43,9 +42,9 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 }
 
 /* __rust_unstable_column!(): expands to the current column number */
-pub fn expand_column_gated(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_column_gated(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                   -> Box<dyn base::MacResult+'static> {
-    if sp.allows_unstable() {
+    if sp.allows_unstable("__rust_unstable_column") {
         expand_column(cx, sp, tts)
     } else {
         cx.span_fatal(sp, "the __rust_unstable_column macro is unstable");
@@ -55,7 +54,7 @@ pub fn expand_column_gated(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Token
 /// file!(): expands to the current filename */
 /// The source_file (`loc.file`) contains a bunch more information we could spit
 /// out if we wanted.
-pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                    -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "file!");
 
@@ -64,13 +63,13 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
     base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
 }
 
-pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_stringify(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                         -> Box<dyn base::MacResult+'static> {
     let s = pprust::tts_to_string(tts);
     base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
 }
 
-pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                   -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "module_path!");
     let mod_path = &cx.current_expansion.module.mod_path;
@@ -82,7 +81,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 /// include! : parse the given file as an expr
 /// This is generally a bad idea because it's going to behave
 /// unhygienically.
-pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                            -> Box<dyn base::MacResult+'cx> {
     let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
         Some(f) => f,
@@ -120,7 +119,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::T
 }
 
 // include_str! : read the given file, insert it as a literal string expr
-pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                           -> Box<dyn base::MacResult+'static> {
     let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
         Some(f) => f,
@@ -148,7 +147,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
     }
 }
 
-pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                             -> Box<dyn base::MacResult+'static> {
     let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
         Some(f) => f,
@@ -178,7 +177,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
 
 // resolve a file-system path to an absolute file-system path (if it
 // isn't already)
-fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: String) -> PathBuf {
+fn res_rel_file(cx: &mut ExtCtxt<'_>, sp: syntax_pos::Span, arg: String) -> PathBuf {
     let arg = PathBuf::from(arg);
     // Relative paths are resolved relative to the file in which they are found
     // after macro expansion (that is, they are unhygienic).
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index b4003ac729a..d4ea3b81a60 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -1,4 +1,4 @@
-//! This is an NFA-based parser, which calls out to the main rust parser for named nonterminals
+//! This is an NFA-based parser, which calls out to the main rust parser for named non-terminals
 //! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads
 //! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in
 //! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier
@@ -22,7 +22,7 @@
 //!
 //! As it processes them, it fills up `eof_items` with threads that would be valid if
 //! the macro invocation is now over, `bb_items` with threads that are waiting on
-//! a Rust nonterminal like `$e:expr`, and `next_items` with threads that are waiting
+//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
 //! on a particular token. Most of the logic concerns moving the · through the
 //! repetitions indicated by Kleene stars. The rules for moving the · without
 //! consuming any input are called epsilon transitions. It only advances or calls
@@ -70,21 +70,22 @@
 //! eof: [a $( a )* a b ·]
 //! ```
 
-pub use self::NamedMatch::*;
-pub use self::ParseResult::*;
-use self::TokenTreeOrTokenTreeSlice::*;
-
-use ast::Ident;
+pub use NamedMatch::*;
+pub use ParseResult::*;
+use TokenTreeOrTokenTreeSlice::*;
+
+use crate::ast::Ident;
+use crate::errors::FatalError;
+use crate::ext::tt::quoted::{self, TokenTree};
+use crate::parse::{Directory, ParseSess};
+use crate::parse::parser::{Parser, PathStyle};
+use crate::parse::token::{self, DocComment, Nonterminal, Token};
+use crate::print::pprust;
+use crate::symbol::keywords;
+use crate::tokenstream::{DelimSpan, TokenStream};
+
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::{self, Span};
-use errors::FatalError;
-use ext::tt::quoted::{self, TokenTree};
-use parse::{Directory, ParseSess};
-use parse::parser::{Parser, PathStyle};
-use parse::token::{self, DocComment, Nonterminal, Token};
-use print::pprust;
-use smallvec::SmallVec;
-use symbol::keywords;
-use tokenstream::{DelimSpan, TokenStream};
 
 use rustc_data_structures::fx::FxHashMap;
 use std::collections::hash_map::Entry::{Occupied, Vacant};
@@ -215,7 +216,7 @@ struct MatcherPos<'root, 'tt: 'root> {
 }
 
 impl<'root, 'tt> MatcherPos<'root, 'tt> {
-    /// Add `m` as a named match for the `idx`-th metavar.
+    /// Adds `m` as a named match for the `idx`-th metavar.
     fn push_match(&mut self, idx: usize, m: NamedMatch) {
         let matches = Rc::make_mut(&mut self.matches[idx]);
         matches.push(m);
@@ -303,7 +304,7 @@ fn create_matches(len: usize) -> Box<[Rc<NamedMatchVec>]> {
     }.into_boxed_slice()
 }
 
-/// Generate the top-level matcher position in which the "dot" is before the first token of the
+/// Generates the top-level matcher position in which the "dot" is before the first token of the
 /// matcher `ms` and we are going to start matching at the span `open` in the source.
 fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherPos<'root, 'tt> {
     let match_idx_hi = count_names(ms);
@@ -336,7 +337,7 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP
 
 /// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
 /// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type
+/// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type
 /// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
 /// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
 ///
@@ -413,7 +414,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
     Success(ret_val)
 }
 
-/// Generate an appropriate parsing failure message. For EOF, this is "unexpected end...". For
+/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
 /// other tokens, this is "unexpected token...".
 pub fn parse_failure_msg(tok: Token) -> String {
     match tok {
@@ -425,7 +426,7 @@ pub fn parse_failure_msg(tok: Token) -> String {
     }
 }
 
-/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
+/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
 fn token_name_eq(t1: &Token, t2: &Token) -> bool {
     if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
         id1.name == id2.name && is_raw1 == is_raw2
@@ -649,7 +650,7 @@ pub fn parse(
     sess: &ParseSess,
     tts: TokenStream,
     ms: &[TokenTree],
-    directory: Option<Directory>,
+    directory: Option<Directory<'_>>,
     recurse_into_modules: bool,
 ) -> NamedParseResult {
     // Create a parser that can be used for the "black box" parts.
@@ -879,7 +880,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
     }
 }
 
-/// A call to the "black-box" parser to parse some rust nonterminal.
+/// A call to the "black-box" parser to parse some Rust non-terminal.
 ///
 /// # Parameters
 ///
@@ -890,7 +891,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
 ///
 /// # Returns
 ///
-/// The parsed nonterminal.
+/// The parsed non-terminal.
 fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
     if name == "tt" {
         return token::NtTT(p.parse_token_tree());
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 176575b67ea..f4e0041c862 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -1,29 +1,31 @@
-use {ast, attr};
-use syntax_pos::{Span, DUMMY_SP};
-use edition::Edition;
-use errors::FatalError;
-use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
-use ext::base::{NormalTT, TTMacroExpander};
-use ext::expand::{AstFragment, AstFragmentKind};
-use ext::tt::macro_parser::{Success, Error, Failure};
-use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
-use ext::tt::macro_parser::{parse, parse_failure_msg};
-use ext::tt::quoted;
-use ext::tt::transcribe::transcribe;
-use feature_gate::Features;
-use parse::{Directory, ParseSess};
-use parse::parser::Parser;
-use parse::token::{self, NtTT};
-use parse::token::Token::*;
-use symbol::Symbol;
-use tokenstream::{DelimSpan, TokenStream, TokenTree};
-
-use rustc_data_structures::fx::FxHashMap;
+use crate::{ast, attr};
+use crate::edition::Edition;
+use crate::errors::FatalError;
+use crate::ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
+use crate::ext::base::{NormalTT, TTMacroExpander};
+use crate::ext::expand::{AstFragment, AstFragmentKind};
+use crate::ext::tt::macro_parser::{Success, Error, Failure};
+use crate::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
+use crate::ext::tt::macro_parser::{parse, parse_failure_msg};
+use crate::ext::tt::quoted;
+use crate::ext::tt::transcribe::transcribe;
+use crate::feature_gate::Features;
+use crate::parse::{Directory, ParseSess};
+use crate::parse::parser::Parser;
+use crate::parse::token::{self, NtTT};
+use crate::parse::token::Token::*;
+use crate::symbol::Symbol;
+use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
+
+use syntax_pos::{Span, DUMMY_SP, symbol::Ident};
+use log::debug;
+
+use rustc_data_structures::fx::{FxHashMap};
 use std::borrow::Cow;
 use std::collections::hash_map::Entry;
 
 use rustc_data_structures::sync::Lrc;
-use errors::Applicability;
+use crate::errors::Applicability;
 
 const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
     `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, \
@@ -91,7 +93,7 @@ struct MacroRulesMacroExpander {
 impl TTMacroExpander for MacroRulesMacroExpander {
     fn expand<'cx>(
         &self,
-        cx: &'cx mut ExtCtxt,
+        cx: &'cx mut ExtCtxt<'_>,
         sp: Span,
         input: TokenStream,
         def_span: Option<Span>,
@@ -109,13 +111,13 @@ impl TTMacroExpander for MacroRulesMacroExpander {
     }
 }
 
-fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
+fn trace_macros_note(cx: &mut ExtCtxt<'_>, sp: Span, message: String) {
     let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
     cx.expansions.entry(sp).or_default().push(message);
 }
 
 /// Given `lhses` and `rhses`, this is the new macro we create
-fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
+fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
                           sp: Span,
                           def_span: Option<Span>,
                           name: ast::Ident,
@@ -244,8 +246,12 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
 // Holy self-referential!
 
 /// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: Edition)
-               -> SyntaxExtension {
+pub fn compile(
+    sess: &ParseSess,
+    features: &Features,
+    def: &ast::Item,
+    edition: Edition
+) -> SyntaxExtension {
     let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
     let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
 
@@ -353,7 +359,13 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
 
     // don't abort iteration early, so that errors for multiple lhses can be reported
     for lhs in &lhses {
-        valid &= check_lhs_no_empty_seq(sess, &[lhs.clone()])
+        valid &= check_lhs_no_empty_seq(sess, &[lhs.clone()]);
+        valid &= check_lhs_duplicate_matcher_bindings(
+            sess,
+            &[lhs.clone()],
+            &mut FxHashMap::default(),
+            def.id
+        );
     }
 
     let expander: Box<_> = Box::new(MacroRulesMacroExpander {
@@ -364,7 +376,24 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
     });
 
     if body.legacy {
-        let allow_internal_unstable = attr::contains_name(&def.attrs, "allow_internal_unstable");
+        let allow_internal_unstable = attr::find_by_name(&def.attrs, "allow_internal_unstable")
+            .map(|attr| attr
+                .meta_item_list()
+                .map(|list| list.iter()
+                    .map(|it| it.name().unwrap_or_else(|| sess.span_diagnostic.span_bug(
+                        it.span, "allow internal unstable expects feature names",
+                    )))
+                    .collect::<Vec<Symbol>>().into()
+                )
+                .unwrap_or_else(|| {
+                    sess.span_diagnostic.span_warn(
+                        attr.span, "allow_internal_unstable expects list of feature names. In the \
+                        future this will become a hard error. Please use `allow_internal_unstable(\
+                        foo, bar)` to only allow the `foo` and `bar` features",
+                    );
+                    vec![Symbol::intern("allow_internal_unstable_backcompat_hack")].into()
+                })
+            );
         let allow_internal_unsafe = attr::contains_name(&def.attrs, "allow_internal_unsafe");
         let mut local_inner_macros = false;
         if let Some(macro_export) = attr::find_by_name(&def.attrs, "macro_export") {
@@ -420,10 +449,10 @@ fn check_lhs_nt_follows(sess: &ParseSess,
     // after parsing/expansion. we can report every error in every macro this way.
 }
 
-/// Check that the lhs contains no repetition which could match an empty token
+/// Checks that the lhs contains no repetition which could match an empty token
 /// tree, because then the matcher would hang indefinitely.
 fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
-    use self::quoted::TokenTree;
+    use quoted::TokenTree;
     for tt in tts {
         match *tt {
             TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
@@ -454,6 +483,53 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
     true
 }
 
+/// Check that the LHS contains no duplicate matcher bindings. e.g. `$a:expr, $a:expr` would be
+/// illegal, since it would be ambiguous which `$a` to use if we ever needed to.
+fn check_lhs_duplicate_matcher_bindings(
+    sess: &ParseSess,
+    tts: &[quoted::TokenTree],
+    metavar_names: &mut FxHashMap<Ident, Span>,
+    node_id: ast::NodeId,
+) -> bool {
+    use self::quoted::TokenTree;
+    use crate::early_buffered_lints::BufferedEarlyLintId;
+    for tt in tts {
+        match *tt {
+            TokenTree::MetaVarDecl(span, name, _kind) => {
+                if let Some(&prev_span) = metavar_names.get(&name) {
+                    // FIXME(mark-i-m): in a few cycles, make this a hard error.
+                    // sess.span_diagnostic
+                    //     .struct_span_err(span, "duplicate matcher binding")
+                    //     .span_note(prev_span, "previous declaration was here")
+                    //     .emit();
+                    sess.buffer_lint(
+                        BufferedEarlyLintId::DuplicateMacroMatcherBindingName,
+                        crate::source_map::MultiSpan::from(vec![prev_span, span]),
+                        node_id,
+                        "duplicate matcher binding"
+                    );
+                    return false;
+                } else {
+                    metavar_names.insert(name, span);
+                }
+            }
+            TokenTree::Delimited(_, ref del) => {
+                if !check_lhs_duplicate_matcher_bindings(sess, &del.tts, metavar_names, node_id) {
+                    return false;
+                }
+            },
+            TokenTree::Sequence(_, ref seq) => {
+                if !check_lhs_duplicate_matcher_bindings(sess, &seq.tts, metavar_names, node_id) {
+                    return false;
+                }
+            }
+            _ => {}
+        }
+    }
+
+    true
+}
+
 fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
     match *rhs {
         quoted::TokenTree::Delimited(..) => return true,
@@ -497,7 +573,7 @@ struct FirstSets {
 
 impl FirstSets {
     fn new(tts: &[quoted::TokenTree]) -> FirstSets {
-        use self::quoted::TokenTree;
+        use quoted::TokenTree;
 
         let mut sets = FirstSets { first: FxHashMap::default() };
         build_recur(&mut sets, tts);
@@ -567,7 +643,7 @@ impl FirstSets {
     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
     fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
-        use self::quoted::TokenTree;
+        use quoted::TokenTree;
 
         let mut first = TokenSet::empty();
         for tt in tts.iter() {
@@ -721,7 +797,7 @@ fn check_matcher_core(sess: &ParseSess,
                       first_sets: &FirstSets,
                       matcher: &[quoted::TokenTree],
                       follow: &TokenSet) -> TokenSet {
-    use self::quoted::TokenTree;
+    use quoted::TokenTree;
 
     let mut last = TokenSet::empty();
 
@@ -901,8 +977,8 @@ fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
     }
 }
 
-/// True if a fragment of type `frag` can be followed by any sort of
-/// token.  We use this (among other things) as a useful approximation
+/// Returns `true` if a fragment of type `frag` can be followed by any sort of
+/// token. We use this (among other things) as a useful approximation
 /// for when `frag` can be followed by a repetition like `$(...)*` or
 /// `$(...)+`. In general, these can be a bit tricky to reason about,
 /// so we adopt a conservative position that says that any fragment
@@ -931,7 +1007,7 @@ enum IsInFollow {
     Invalid(String, &'static str),
 }
 
-/// True if `frag` can legally be followed by the token `tok`. For
+/// Returns `true` if `frag` can legally be followed by the token `tok`. For
 /// fragments that can consume an unbounded number of tokens, `tok`
 /// must be within a well-defined follow set. This is intended to
 /// guarantee future compatibility: for example, without this rule, if
@@ -940,7 +1016,7 @@ enum IsInFollow {
 /// separator.
 // when changing this do not forget to update doc/book/macros.md!
 fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
-    use self::quoted::TokenTree;
+    use quoted::TokenTree;
 
     if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
         // closing a token tree can never be matched by any fragment;
@@ -1072,7 +1148,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess,
 
 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
+        quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index b56871a1885..255795f28c7 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -1,13 +1,14 @@
-use ast::NodeId;
-use early_buffered_lints::BufferedEarlyLintId;
-use ext::tt::macro_parser;
-use feature_gate::Features;
-use parse::{token, ParseSess};
-use print::pprust;
-use symbol::keywords;
+use crate::ast::NodeId;
+use crate::early_buffered_lints::BufferedEarlyLintId;
+use crate::ext::tt::macro_parser;
+use crate::feature_gate::Features;
+use crate::parse::{token, ParseSess};
+use crate::print::pprust;
+use crate::tokenstream::{self, DelimSpan};
+use crate::ast;
+use crate::symbol::keywords;
+
 use syntax_pos::{edition::Edition, BytePos, Span};
-use tokenstream::{self, DelimSpan};
-use ast;
 
 use rustc_data_structures::sync::Lrc;
 use std::iter::Peekable;
@@ -21,17 +22,17 @@ pub struct Delimited {
 }
 
 impl Delimited {
-    /// Return the opening delimiter (possibly `NoDelim`).
+    /// Returns the opening delimiter (possibly `NoDelim`).
     pub fn open_token(&self) -> token::Token {
         token::OpenDelim(self.delim)
     }
 
-    /// Return the closing delimiter (possibly `NoDelim`).
+    /// Returns the closing delimiter (possibly `NoDelim`).
     pub fn close_token(&self) -> token::Token {
         token::CloseDelim(self.delim)
     }
 
-    /// Return a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
     pub fn open_tt(&self, span: Span) -> TokenTree {
         let open_span = if span.is_dummy() {
             span
@@ -41,7 +42,7 @@ impl Delimited {
         TokenTree::Token(open_span, self.open_token())
     }
 
-    /// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
     pub fn close_tt(&self, span: Span) -> TokenTree {
         let close_span = if span.is_dummy() {
             span
@@ -106,7 +107,7 @@ impl TokenTree {
         }
     }
 
-    /// Returns true if the given token tree contains no other tokens. This is vacuously true for
+    /// Returns `true` if the given token tree contains no other tokens. This is vacuously true for
     /// single tokens or metavar/decls, but may be false for delimited trees or sequences.
     pub fn is_empty(&self) -> bool {
         match *self {
@@ -119,7 +120,7 @@ impl TokenTree {
         }
     }
 
-    /// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
+    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
             (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
@@ -139,7 +140,7 @@ impl TokenTree {
         }
     }
 
-    /// Retrieve the `TokenTree`'s span.
+    /// Retrieves the `TokenTree`'s span.
     pub fn span(&self) -> Span {
         match *self {
             TokenTree::Token(sp, _)
@@ -410,8 +411,8 @@ where
 /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
 /// error with the appropriate span is emitted to `sess` and a dummy value is returned.
 ///
-/// NOTE: In 2015 edition, * and + are the only Kleene operators and `?` is a separator. In 2018,
-/// `?` is a Kleene op and not a separator.
+/// N.B., in the 2015 edition, `*` and `+` are the only Kleene operators, and `?` is a separator.
+/// In the 2018 edition however, `?` is a Kleene operator, and not a separator.
 fn parse_sep_and_kleene_op<I>(
     input: &mut Peekable<I>,
     span: Span,
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 0ef2d3b749d..b9a50cc6488 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -1,13 +1,14 @@
-use ast::Ident;
-use ext::base::ExtCtxt;
-use ext::expand::Marker;
-use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
-use ext::tt::quoted;
-use fold::noop_fold_tt;
-use parse::token::{self, Token, NtTT};
-use smallvec::SmallVec;
+use crate::ast::Ident;
+use crate::ext::base::ExtCtxt;
+use crate::ext::expand::Marker;
+use crate::ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
+use crate::ext::tt::quoted;
+use crate::mut_visit::noop_visit_tt;
+use crate::parse::token::{self, Token, NtTT};
+use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
+
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::DUMMY_SP;
-use tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -56,7 +57,7 @@ impl Iterator for Frame {
 /// This can do Macro-By-Example transcription. On the other hand, if
 /// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can
 /// (and should) be None.
-pub fn transcribe(cx: &ExtCtxt,
+pub fn transcribe(cx: &ExtCtxt<'_>,
                   interp: Option<FxHashMap<Ident, Rc<NamedMatch>>>,
                   src: Vec<quoted::TokenTree>)
                   -> TokenStream {
@@ -170,7 +171,9 @@ pub fn transcribe(cx: &ExtCtxt,
             }
             quoted::TokenTree::Token(sp, tok) => {
                 let mut marker = Marker(cx.current_expansion.mark);
-                result.push(noop_fold_tt(TokenTree::Token(sp, tok), &mut marker).into())
+                let mut tt = TokenTree::Token(sp, tok);
+                noop_visit_tt(&mut tt, &mut marker);
+                result.push(tt.into());
             }
             quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
         }
@@ -228,7 +231,7 @@ fn lockstep_iter_size(tree: &quoted::TokenTree,
                       interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
                       repeats: &[(usize, usize)])
                       -> LockstepIterSize {
-    use self::quoted::TokenTree;
+    use quoted::TokenTree;
     match *tree {
         TokenTree::Delimited(_, ref delimed) => {
             delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index bf4c637045f..7f4f8f52313 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -12,21 +12,23 @@
 //! gate usage is added, *do not remove it again* even once the feature
 //! becomes stable.
 
-use self::AttributeType::*;
-use self::AttributeGate::*;
+use AttributeType::*;
+use AttributeGate::*;
+
+use crate::ast::{self, NodeId, GenericParam, GenericParamKind, PatKind, RangeEnd};
+use crate::attr;
+use crate::early_buffered_lints::BufferedEarlyLintId;
+use crate::source_map::Spanned;
+use crate::edition::{ALL_EDITIONS, Edition};
+use crate::errors::{DiagnosticBuilder, Handler};
+use crate::visit::{self, FnKind, Visitor};
+use crate::parse::ParseSess;
+use crate::symbol::Symbol;
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_target::spec::abi::Abi;
-use ast::{self, NodeId, PatKind, RangeEnd};
-use attr;
-use early_buffered_lints::BufferedEarlyLintId;
-use source_map::Spanned;
-use edition::{ALL_EDITIONS, Edition};
 use syntax_pos::{Span, DUMMY_SP};
-use errors::{DiagnosticBuilder, Handler};
-use visit::{self, FnKind, Visitor};
-use parse::ParseSess;
-use symbol::Symbol;
+use log::debug;
 
 use std::env;
 
@@ -460,8 +462,14 @@ declare_features! (
     // Re-Rebalance coherence
     (active, re_rebalance_coherence, "1.32.0", Some(55437), None),
 
+    // Const generic types.
+    (active, const_generics, "1.34.0", Some(44580), None),
+
     // #[optimize(X)]
     (active, optimize_attribute, "1.34.0", Some(54882), None),
+
+    // #[repr(align(X))] on enums
+    (active, repr_align_enum, "1.34.0", Some(57996), None),
 );
 
 declare_features! (
@@ -729,7 +737,7 @@ pub struct AttributeTemplate {
 }
 
 impl AttributeTemplate {
-    /// Check that the given meta-item is compatible with this template.
+    /// Checks that the given meta-item is compatible with this template.
     fn compatible(&self, meta_item_kind: &ast::MetaItemKind) -> bool {
         match meta_item_kind {
             ast::MetaItemKind::Word => self.word,
@@ -741,7 +749,7 @@ impl AttributeTemplate {
 }
 
 /// A convenience macro for constructing attribute templates.
-/// E.g. `template!(Word, List: "description")` means that the attribute
+/// E.g., `template!(Word, List: "description")` means that the attribute
 /// supports forms `#[attr]` and `#[attr(description)]`.
 macro_rules! template {
     (Word) => { template!(@ true, None, None) };
@@ -778,8 +786,8 @@ pub enum Stability {
 }
 
 // fn() is not Debug
-impl ::std::fmt::Debug for AttributeGate {
-    fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
+impl std::fmt::Debug for AttributeGate {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match *self {
             Gated(ref stab, name, expl, _) =>
                 write!(fmt, "Gated({:?}, {}, {})", stab, name, expl),
@@ -1083,7 +1091,8 @@ pub const BUILTIN_ATTRIBUTES: &[(&str, AttributeType, AttributeTemplate, Attribu
                                               stable",
                                              cfg_fn!(profiler_runtime))),
 
-    ("allow_internal_unstable", Normal, template!(Word), Gated(Stability::Unstable,
+    ("allow_internal_unstable", Normal, template!(Word, List: "feat1, feat2, ..."),
+                                              Gated(Stability::Unstable,
                                               "allow_internal_unstable",
                                               EXPLAIN_ALLOW_INTERNAL_UNSTABLE,
                                               cfg_fn!(allow_internal_unstable))),
@@ -1191,7 +1200,7 @@ pub const BUILTIN_ATTRIBUTES: &[(&str, AttributeType, AttributeTemplate, Attribu
     ("proc_macro", Normal, template!(Word), Ungated),
 
     ("rustc_proc_macro_decls", Normal, template!(Word), Gated(Stability::Unstable,
-                                             "rustc_proc_macro_decls",
+                                             "rustc_attrs",
                                              "used internally by rustc",
                                              cfg_fn!(rustc_attrs))),
 
@@ -1276,7 +1285,7 @@ impl GatedCfg {
 
     pub fn check_and_emit(&self, sess: &ParseSess, features: &Features) {
         let (cfg, feature, has_feature) = GATED_CFGS[self.index];
-        if !has_feature(features) && !self.span.allows_unstable() {
+        if !has_feature(features) && !self.span.allows_unstable(feature) {
             let explain = format!("`cfg({})` is experimental and subject to change", cfg);
             emit_feature_err(sess, feature, self.span, GateIssue::Language, &explain);
         }
@@ -1295,7 +1304,7 @@ macro_rules! gate_feature_fn {
              name, explain, level) = ($cx, $has_feature, $span, $name, $explain, $level);
         let has_feature: bool = has_feature(&$cx.features);
         debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature);
-        if !has_feature && !span.allows_unstable() {
+        if !has_feature && !span.allows_unstable($name) {
             leveled_feature_err(cx.parse_sess, name, span, GateIssue::Language, explain, level)
                 .emit();
         }
@@ -1320,7 +1329,11 @@ impl<'a> Context<'a> {
         for &(n, ty, _template, ref gateage) in BUILTIN_ATTRIBUTES {
             if name == n {
                 if let Gated(_, name, desc, ref has_feature) = *gateage {
-                    gate_feature_fn!(self, has_feature, attr.span, name, desc, GateStrength::Hard);
+                    if !attr.span.allows_unstable(name) {
+                        gate_feature_fn!(
+                            self, has_feature, attr.span, name, desc, GateStrength::Hard
+                        );
+                    }
                 } else if name == "doc" {
                     if let Some(content) = attr.meta_item_list() {
                         if content.iter().any(|c| c.check_name("include")) {
@@ -1485,13 +1498,13 @@ struct PostExpansionVisitor<'a> {
 macro_rules! gate_feature_post {
     ($cx: expr, $feature: ident, $span: expr, $explain: expr) => {{
         let (cx, span) = ($cx, $span);
-        if !span.allows_unstable() {
+        if !span.allows_unstable(stringify!($feature)) {
             gate_feature!(cx.context, $feature, span, $explain)
         }
     }};
     ($cx: expr, $feature: ident, $span: expr, $explain: expr, $level: expr) => {{
         let (cx, span) = ($cx, $span);
-        if !span.allows_unstable() {
+        if !span.allows_unstable(stringify!($feature)) {
             gate_feature!(cx.context, $feature, span, $explain, $level)
         }
     }}
@@ -1602,10 +1615,8 @@ impl<'a> PostExpansionVisitor<'a> {
 
 impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
     fn visit_attribute(&mut self, attr: &ast::Attribute) {
-        if !attr.span.allows_unstable() {
-            // check for gated attributes
-            self.context.check_attribute(attr, false);
-        }
+        // check for gated attributes
+        self.context.check_attribute(attr, false);
 
         if attr.check_name("doc") {
             if let Some(content) = attr.meta_item_list() {
@@ -1698,6 +1709,17 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                 }
             }
 
+            ast::ItemKind::Enum(..) => {
+                for attr in attr::filter_by_name(&i.attrs[..], "repr") {
+                    for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
+                        if item.check_name("align") {
+                            gate_feature_post!(&self, repr_align_enum, attr.span,
+                                               "`#[repr(align(x))]` on enums is experimental");
+                        }
+                    }
+                }
+            }
+
             ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
                 if polarity == ast::ImplPolarity::Negative {
                     gate_feature_post!(&self, optin_builtin_traits,
@@ -1883,6 +1905,14 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
         visit::walk_fn(self, fn_kind, fn_decl, span);
     }
 
+    fn visit_generic_param(&mut self, param: &'a GenericParam) {
+        if let GenericParamKind::Const { .. } = param.kind {
+            gate_feature_post!(&self, const_generics, param.ident.span,
+                "const generics are unstable");
+        }
+        visit::walk_generic_param(self, param);
+    }
+
     fn visit_trait_item(&mut self, ti: &'a ast::TraitItem) {
         match ti.node {
             ast::TraitItemKind::Method(ref sig, ref block) => {
@@ -1968,7 +1998,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute],
     // Some features are known to be incomplete and using them is likely to have
     // unanticipated results, such as compiler crashes. We warn the user about these
     // to alert them.
-    let incomplete_features = ["generic_associated_types"];
+    let incomplete_features = ["generic_associated_types", "const_generics"];
 
     let mut features = Features::new();
     let mut edition_enabled_features = FxHashMap::default();
@@ -2118,8 +2148,7 @@ pub fn check_crate(krate: &ast::Crate,
 
 #[derive(Clone, Copy, Hash)]
 pub enum UnstableFeatures {
-    /// Hard errors for unstable features are active, as on
-    /// beta/stable channels.
+    /// Hard errors for unstable features are active, as on beta/stable channels.
     Disallow,
     /// Allow features to be activated, as on nightly.
     Allow,
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
deleted file mode 100644
index fdcbbb939a6..00000000000
--- a/src/libsyntax/fold.rs
+++ /dev/null
@@ -1,1495 +0,0 @@
-//! A Folder represents an AST->AST fold; it accepts an AST piece,
-//! and returns a piece of the same type. So, for instance, macro
-//! expansion is a Folder that walks over an AST and produces another
-//! AST.
-//!
-//! Note: using a Folder (other than the MacroExpander Folder) on
-//! an AST before macro expansion is probably a bad idea. For instance,
-//! a folder renaming item names in a module will miss all of those
-//! that are created by the expansion of a macro.
-
-use ast::*;
-use ast;
-use syntax_pos::Span;
-use source_map::{Spanned, respan};
-use parse::token::{self, Token};
-use ptr::P;
-use smallvec::{Array, SmallVec};
-use symbol::keywords;
-use ThinVec;
-use tokenstream::*;
-use util::move_map::MoveMap;
-
-use rustc_data_structures::sync::Lrc;
-
-pub trait ExpectOne<A: Array> {
-    fn expect_one(self, err: &'static str) -> A::Item;
-}
-
-impl<A: Array> ExpectOne<A> for SmallVec<A> {
-    fn expect_one(self, err: &'static str) -> A::Item {
-        assert!(self.len() == 1, err);
-        self.into_iter().next().unwrap()
-    }
-}
-
-pub trait Folder : Sized {
-    // Any additions to this trait should happen in form
-    // of a call to a public `noop_*` function that only calls
-    // out to the folder again, not other `noop_*` functions.
-    //
-    // This is a necessary API workaround to the problem of not
-    // being able to call out to the super default method
-    // in an overridden default method.
-
-    fn fold_crate(&mut self, c: Crate) -> Crate {
-        noop_fold_crate(c, self)
-    }
-
-    fn fold_meta_items(&mut self, meta_items: Vec<MetaItem>) -> Vec<MetaItem> {
-        noop_fold_meta_items(meta_items, self)
-    }
-
-    fn fold_meta_list_item(&mut self, list_item: NestedMetaItem) -> NestedMetaItem {
-        noop_fold_meta_list_item(list_item, self)
-    }
-
-    fn fold_meta_item(&mut self, meta_item: MetaItem) -> MetaItem {
-        noop_fold_meta_item(meta_item, self)
-    }
-
-    fn fold_use_tree(&mut self, use_tree: UseTree) -> UseTree {
-        noop_fold_use_tree(use_tree, self)
-    }
-
-    fn fold_foreign_item(&mut self, ni: ForeignItem) -> SmallVec<[ForeignItem; 1]> {
-        noop_fold_foreign_item(ni, self)
-    }
-
-    fn fold_foreign_item_simple(&mut self, ni: ForeignItem) -> ForeignItem {
-        noop_fold_foreign_item_simple(ni, self)
-    }
-
-    fn fold_item(&mut self, i: P<Item>) -> SmallVec<[P<Item>; 1]> {
-        noop_fold_item(i, self)
-    }
-
-    fn fold_item_simple(&mut self, i: Item) -> Item {
-        noop_fold_item_simple(i, self)
-    }
-
-    fn fold_fn_header(&mut self, header: FnHeader) -> FnHeader {
-        noop_fold_fn_header(header, self)
-    }
-
-    fn fold_struct_field(&mut self, sf: StructField) -> StructField {
-        noop_fold_struct_field(sf, self)
-    }
-
-    fn fold_item_kind(&mut self, i: ItemKind) -> ItemKind {
-        noop_fold_item_kind(i, self)
-    }
-
-    fn fold_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> {
-        noop_fold_trait_item(i, self)
-    }
-
-    fn fold_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> {
-        noop_fold_impl_item(i, self)
-    }
-
-    fn fold_fn_decl(&mut self, d: P<FnDecl>) -> P<FnDecl> {
-        noop_fold_fn_decl(d, self)
-    }
-
-    fn fold_asyncness(&mut self, a: IsAsync) -> IsAsync {
-        noop_fold_asyncness(a, self)
-    }
-
-    fn fold_block(&mut self, b: P<Block>) -> P<Block> {
-        noop_fold_block(b, self)
-    }
-
-    fn fold_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> {
-        noop_fold_stmt(s, self)
-    }
-
-    fn fold_arm(&mut self, a: Arm) -> Arm {
-        noop_fold_arm(a, self)
-    }
-
-    fn fold_guard(&mut self, g: Guard) -> Guard {
-        noop_fold_guard(g, self)
-    }
-
-    fn fold_pat(&mut self, p: P<Pat>) -> P<Pat> {
-        noop_fold_pat(p, self)
-    }
-
-    fn fold_anon_const(&mut self, c: AnonConst) -> AnonConst {
-        noop_fold_anon_const(c, self)
-    }
-
-    fn fold_expr(&mut self, e: P<Expr>) -> P<Expr> {
-        e.map(|e| noop_fold_expr(e, self))
-    }
-
-    fn fold_range_end(&mut self, re: RangeEnd) -> RangeEnd {
-        noop_fold_range_end(re, self)
-    }
-
-    fn fold_opt_expr(&mut self, e: P<Expr>) -> Option<P<Expr>> {
-        noop_fold_opt_expr(e, self)
-    }
-
-    fn fold_exprs(&mut self, es: Vec<P<Expr>>) -> Vec<P<Expr>> {
-        noop_fold_exprs(es, self)
-    }
-
-    fn fold_generic_arg(&mut self, arg: GenericArg) -> GenericArg {
-        match arg {
-            GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.fold_lifetime(lt)),
-            GenericArg::Type(ty) => GenericArg::Type(self.fold_ty(ty)),
-        }
-    }
-
-    fn fold_ty(&mut self, t: P<Ty>) -> P<Ty> {
-        noop_fold_ty(t, self)
-    }
-
-    fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime {
-        noop_fold_lifetime(l, self)
-    }
-
-    fn fold_ty_binding(&mut self, t: TypeBinding) -> TypeBinding {
-        noop_fold_ty_binding(t, self)
-    }
-
-    fn fold_mod(&mut self, m: Mod) -> Mod {
-        noop_fold_mod(m, self)
-    }
-
-    fn fold_foreign_mod(&mut self, nm: ForeignMod) -> ForeignMod {
-        noop_fold_foreign_mod(nm, self)
-    }
-
-    fn fold_global_asm(&mut self, ga: P<GlobalAsm>) -> P<GlobalAsm> {
-        noop_fold_global_asm(ga, self)
-    }
-
-    fn fold_variant(&mut self, v: Variant) -> Variant {
-        noop_fold_variant(v, self)
-    }
-
-    fn fold_ident(&mut self, i: Ident) -> Ident {
-        noop_fold_ident(i, self)
-    }
-
-    fn fold_usize(&mut self, i: usize) -> usize {
-        noop_fold_usize(i, self)
-    }
-
-    fn fold_path(&mut self, p: Path) -> Path {
-        noop_fold_path(p, self)
-    }
-
-    fn fold_qpath(&mut self, qs: Option<QSelf>, p: Path) -> (Option<QSelf>, Path) {
-        noop_fold_qpath(qs, p, self)
-    }
-
-    fn fold_generic_args(&mut self, p: GenericArgs) -> GenericArgs {
-        noop_fold_generic_args(p, self)
-    }
-
-    fn fold_angle_bracketed_parameter_data(&mut self, p: AngleBracketedArgs)
-                                           -> AngleBracketedArgs
-    {
-        noop_fold_angle_bracketed_parameter_data(p, self)
-    }
-
-    fn fold_parenthesized_parameter_data(&mut self, p: ParenthesizedArgs)
-                                         -> ParenthesizedArgs
-    {
-        noop_fold_parenthesized_parameter_data(p, self)
-    }
-
-    fn fold_local(&mut self, l: P<Local>) -> P<Local> {
-        noop_fold_local(l, self)
-    }
-
-    fn fold_mac(&mut self, _mac: Mac) -> Mac {
-        panic!("fold_mac disabled by default");
-        // N.B., see note about macros above.
-        // if you really want a folder that
-        // works on macros, use this
-        // definition in your trait impl:
-        // fold::noop_fold_mac(_mac, self)
-    }
-
-    fn fold_macro_def(&mut self, def: MacroDef) -> MacroDef {
-        noop_fold_macro_def(def, self)
-    }
-
-    fn fold_label(&mut self, label: Label) -> Label {
-        noop_fold_label(label, self)
-    }
-
-    fn fold_attribute(&mut self, at: Attribute) -> Option<Attribute> {
-        noop_fold_attribute(at, self)
-    }
-
-    fn fold_arg(&mut self, a: Arg) -> Arg {
-        noop_fold_arg(a, self)
-    }
-
-    fn fold_generics(&mut self, generics: Generics) -> Generics {
-        noop_fold_generics(generics, self)
-    }
-
-    fn fold_trait_ref(&mut self, p: TraitRef) -> TraitRef {
-        noop_fold_trait_ref(p, self)
-    }
-
-    fn fold_poly_trait_ref(&mut self, p: PolyTraitRef) -> PolyTraitRef {
-        noop_fold_poly_trait_ref(p, self)
-    }
-
-    fn fold_variant_data(&mut self, vdata: VariantData) -> VariantData {
-        noop_fold_variant_data(vdata, self)
-    }
-
-    fn fold_generic_param(&mut self, param: GenericParam) -> GenericParam {
-        noop_fold_generic_param(param, self)
-    }
-
-    fn fold_generic_params(&mut self, params: Vec<GenericParam>) -> Vec<GenericParam> {
-        noop_fold_generic_params(params, self)
-    }
-
-    fn fold_tt(&mut self, tt: TokenTree) -> TokenTree {
-        noop_fold_tt(tt, self)
-    }
-
-    fn fold_tts(&mut self, tts: TokenStream) -> TokenStream {
-        noop_fold_tts(tts, self)
-    }
-
-    fn fold_token(&mut self, t: token::Token) -> token::Token {
-        noop_fold_token(t, self)
-    }
-
-    fn fold_interpolated(&mut self, nt: token::Nonterminal) -> token::Nonterminal {
-        noop_fold_interpolated(nt, self)
-    }
-
-    fn fold_opt_bounds(&mut self, b: Option<GenericBounds>) -> Option<GenericBounds> {
-        noop_fold_opt_bounds(b, self)
-    }
-
-    fn fold_bounds(&mut self, b: GenericBounds) -> GenericBounds {
-        noop_fold_bounds(b, self)
-    }
-
-    fn fold_param_bound(&mut self, tpb: GenericBound) -> GenericBound {
-        noop_fold_param_bound(tpb, self)
-    }
-
-    fn fold_mt(&mut self, mt: MutTy) -> MutTy {
-        noop_fold_mt(mt, self)
-    }
-
-    fn fold_field(&mut self, field: Field) -> Field {
-        noop_fold_field(field, self)
-    }
-
-    fn fold_where_clause(&mut self, where_clause: WhereClause)
-                         -> WhereClause {
-        noop_fold_where_clause(where_clause, self)
-    }
-
-    fn fold_where_predicate(&mut self, where_predicate: WherePredicate)
-                            -> WherePredicate {
-        noop_fold_where_predicate(where_predicate, self)
-    }
-
-    fn fold_vis(&mut self, vis: Visibility) -> Visibility {
-        noop_fold_vis(vis, self)
-    }
-
-    fn new_id(&mut self, i: NodeId) -> NodeId {
-        i
-    }
-
-    fn new_span(&mut self, sp: Span) -> Span {
-        sp
-    }
-}
-
-pub fn noop_fold_meta_items<T: Folder>(meta_items: Vec<MetaItem>, fld: &mut T) -> Vec<MetaItem> {
-    meta_items.move_map(|x| fld.fold_meta_item(x))
-}
-
-pub fn noop_fold_use_tree<T: Folder>(use_tree: UseTree, fld: &mut T) -> UseTree {
-    UseTree {
-        span: fld.new_span(use_tree.span),
-        prefix: fld.fold_path(use_tree.prefix),
-        kind: match use_tree.kind {
-            UseTreeKind::Simple(rename, id1, id2) =>
-                UseTreeKind::Simple(rename.map(|ident| fld.fold_ident(ident)),
-                                    fld.new_id(id1), fld.new_id(id2)),
-            UseTreeKind::Glob => UseTreeKind::Glob,
-            UseTreeKind::Nested(items) => UseTreeKind::Nested(items.move_map(|(tree, id)| {
-                (fld.fold_use_tree(tree), fld.new_id(id))
-            })),
-        },
-    }
-}
-
-pub fn fold_attrs<T: Folder>(attrs: Vec<Attribute>, fld: &mut T) -> Vec<Attribute> {
-    attrs.move_flat_map(|x| fld.fold_attribute(x))
-}
-
-pub fn fold_thin_attrs<T: Folder>(attrs: ThinVec<Attribute>, fld: &mut T) -> ThinVec<Attribute> {
-    fold_attrs(attrs.into(), fld).into()
-}
-
-pub fn noop_fold_arm<T: Folder>(Arm {attrs, pats, guard, body}: Arm,
-    fld: &mut T) -> Arm {
-    Arm {
-        attrs: fold_attrs(attrs, fld),
-        pats: pats.move_map(|x| fld.fold_pat(x)),
-        guard: guard.map(|x| fld.fold_guard(x)),
-        body: fld.fold_expr(body),
-    }
-}
-
-pub fn noop_fold_guard<T: Folder>(g: Guard, fld: &mut T) -> Guard {
-    match g {
-        Guard::If(e) => Guard::If(fld.fold_expr(e)),
-    }
-}
-
-pub fn noop_fold_ty_binding<T: Folder>(b: TypeBinding, fld: &mut T) -> TypeBinding {
-    TypeBinding {
-        id: fld.new_id(b.id),
-        ident: fld.fold_ident(b.ident),
-        ty: fld.fold_ty(b.ty),
-        span: fld.new_span(b.span),
-    }
-}
-
-pub fn noop_fold_ty<T: Folder>(t: P<Ty>, fld: &mut T) -> P<Ty> {
-    t.map(|Ty {id, node, span}| Ty {
-        id: fld.new_id(id),
-        node: match node {
-            TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err => node,
-            TyKind::Slice(ty) => TyKind::Slice(fld.fold_ty(ty)),
-            TyKind::Ptr(mt) => TyKind::Ptr(fld.fold_mt(mt)),
-            TyKind::Rptr(region, mt) => {
-                TyKind::Rptr(region.map(|lt| noop_fold_lifetime(lt, fld)), fld.fold_mt(mt))
-            }
-            TyKind::BareFn(f) => {
-                TyKind::BareFn(f.map(|BareFnTy {generic_params, unsafety, abi, decl}| BareFnTy {
-                    generic_params: fld.fold_generic_params(generic_params),
-                    unsafety,
-                    abi,
-                    decl: fld.fold_fn_decl(decl)
-                }))
-            }
-            TyKind::Never => node,
-            TyKind::Tup(tys) => TyKind::Tup(tys.move_map(|ty| fld.fold_ty(ty))),
-            TyKind::Paren(ty) => TyKind::Paren(fld.fold_ty(ty)),
-            TyKind::Path(qself, path) => {
-                let (qself, path) = fld.fold_qpath(qself, path);
-                TyKind::Path(qself, path)
-            }
-            TyKind::Array(ty, length) => {
-                TyKind::Array(fld.fold_ty(ty), fld.fold_anon_const(length))
-            }
-            TyKind::Typeof(expr) => {
-                TyKind::Typeof(fld.fold_anon_const(expr))
-            }
-            TyKind::TraitObject(bounds, syntax) => {
-                TyKind::TraitObject(bounds.move_map(|b| fld.fold_param_bound(b)), syntax)
-            }
-            TyKind::ImplTrait(id, bounds) => {
-                TyKind::ImplTrait(fld.new_id(id), bounds.move_map(|b| fld.fold_param_bound(b)))
-            }
-            TyKind::Mac(mac) => {
-                TyKind::Mac(fld.fold_mac(mac))
-            }
-        },
-        span: fld.new_span(span)
-    })
-}
-
-pub fn noop_fold_foreign_mod<T: Folder>(ForeignMod {abi, items}: ForeignMod,
-                                        fld: &mut T) -> ForeignMod {
-    ForeignMod {
-        abi,
-        items: items.move_flat_map(|x| fld.fold_foreign_item(x)),
-    }
-}
-
-pub fn noop_fold_global_asm<T: Folder>(ga: P<GlobalAsm>,
-                                       _: &mut T) -> P<GlobalAsm> {
-    ga
-}
-
-pub fn noop_fold_variant<T: Folder>(v: Variant, fld: &mut T) -> Variant {
-    Spanned {
-        node: Variant_ {
-            ident: fld.fold_ident(v.node.ident),
-            attrs: fold_attrs(v.node.attrs, fld),
-            data: fld.fold_variant_data(v.node.data),
-            disr_expr: v.node.disr_expr.map(|e| fld.fold_anon_const(e)),
-        },
-        span: fld.new_span(v.span),
-    }
-}
-
-pub fn noop_fold_ident<T: Folder>(ident: Ident, fld: &mut T) -> Ident {
-    Ident::new(ident.name, fld.new_span(ident.span))
-}
-
-pub fn noop_fold_usize<T: Folder>(i: usize, _: &mut T) -> usize {
-    i
-}
-
-pub fn noop_fold_path<T: Folder>(Path { segments, span }: Path, fld: &mut T) -> Path {
-    Path {
-        segments: segments.move_map(|PathSegment { ident, id, args }| PathSegment {
-            ident: fld.fold_ident(ident),
-            id: fld.new_id(id),
-            args: args.map(|args| args.map(|args| fld.fold_generic_args(args))),
-        }),
-        span: fld.new_span(span)
-    }
-}
-
-pub fn noop_fold_qpath<T: Folder>(qself: Option<QSelf>,
-                                  path: Path,
-                                  fld: &mut T) -> (Option<QSelf>, Path) {
-    let qself = qself.map(|QSelf { ty, path_span, position }| {
-        QSelf {
-            ty: fld.fold_ty(ty),
-            path_span: fld.new_span(path_span),
-            position,
-        }
-    });
-    (qself, fld.fold_path(path))
-}
-
-pub fn noop_fold_generic_args<T: Folder>(generic_args: GenericArgs, fld: &mut T) -> GenericArgs
-{
-    match generic_args {
-        GenericArgs::AngleBracketed(data) => {
-            GenericArgs::AngleBracketed(fld.fold_angle_bracketed_parameter_data(data))
-        }
-        GenericArgs::Parenthesized(data) => {
-            GenericArgs::Parenthesized(fld.fold_parenthesized_parameter_data(data))
-        }
-    }
-}
-
-pub fn noop_fold_angle_bracketed_parameter_data<T: Folder>(data: AngleBracketedArgs,
-                                                           fld: &mut T)
-                                                           -> AngleBracketedArgs
-{
-    let AngleBracketedArgs { args, bindings, span } = data;
-    AngleBracketedArgs {
-        args: args.move_map(|arg| fld.fold_generic_arg(arg)),
-        bindings: bindings.move_map(|b| fld.fold_ty_binding(b)),
-        span: fld.new_span(span)
-    }
-}
-
-pub fn noop_fold_parenthesized_parameter_data<T: Folder>(data: ParenthesizedArgs,
-                                                         fld: &mut T)
-                                                         -> ParenthesizedArgs
-{
-    let ParenthesizedArgs { inputs, output, span } = data;
-    ParenthesizedArgs {
-        inputs: inputs.move_map(|ty| fld.fold_ty(ty)),
-        output: output.map(|ty| fld.fold_ty(ty)),
-        span: fld.new_span(span)
-    }
-}
-
-pub fn noop_fold_local<T: Folder>(l: P<Local>, fld: &mut T) -> P<Local> {
-    l.map(|Local {id, pat, ty, init, span, attrs}| Local {
-        id: fld.new_id(id),
-        pat: fld.fold_pat(pat),
-        ty: ty.map(|t| fld.fold_ty(t)),
-        init: init.map(|e| fld.fold_expr(e)),
-        span: fld.new_span(span),
-        attrs: fold_attrs(attrs.into(), fld).into(),
-    })
-}
-
-pub fn noop_fold_attribute<T: Folder>(attr: Attribute, fld: &mut T) -> Option<Attribute> {
-    Some(Attribute {
-        id: attr.id,
-        style: attr.style,
-        path: fld.fold_path(attr.path),
-        tokens: fld.fold_tts(attr.tokens),
-        is_sugared_doc: attr.is_sugared_doc,
-        span: fld.new_span(attr.span),
-    })
-}
-
-pub fn noop_fold_mac<T: Folder>(Spanned {node, span}: Mac, fld: &mut T) -> Mac {
-    Spanned {
-        node: Mac_ {
-            tts: fld.fold_tts(node.stream()).into(),
-            path: fld.fold_path(node.path),
-            delim: node.delim,
-        },
-        span: fld.new_span(span)
-    }
-}
-
-pub fn noop_fold_macro_def<T: Folder>(def: MacroDef, fld: &mut T) -> MacroDef {
-    MacroDef {
-        tokens: fld.fold_tts(def.tokens.into()).into(),
-        legacy: def.legacy,
-    }
-}
-
-pub fn noop_fold_meta_list_item<T: Folder>(li: NestedMetaItem, fld: &mut T)
-    -> NestedMetaItem {
-    Spanned {
-        node: match li.node {
-            NestedMetaItemKind::MetaItem(mi) =>  {
-                NestedMetaItemKind::MetaItem(fld.fold_meta_item(mi))
-            },
-            NestedMetaItemKind::Literal(lit) => NestedMetaItemKind::Literal(lit)
-        },
-        span: fld.new_span(li.span)
-    }
-}
-
-pub fn noop_fold_meta_item<T: Folder>(mi: MetaItem, fld: &mut T) -> MetaItem {
-    MetaItem {
-        ident: mi.ident,
-        node: match mi.node {
-            MetaItemKind::Word => MetaItemKind::Word,
-            MetaItemKind::List(mis) => {
-                MetaItemKind::List(mis.move_map(|e| fld.fold_meta_list_item(e)))
-            },
-            MetaItemKind::NameValue(s) => MetaItemKind::NameValue(s),
-        },
-        span: fld.new_span(mi.span)
-    }
-}
-
-pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
-    Arg {
-        id: fld.new_id(id),
-        pat: fld.fold_pat(pat),
-        ty: fld.fold_ty(ty)
-    }
-}
-
-pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
-    match tt {
-        TokenTree::Token(span, tok) =>
-            TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
-        TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
-            DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
-            delim,
-            fld.fold_tts(tts).into(),
-        ),
-    }
-}
-
-pub fn noop_fold_tts<T: Folder>(tts: TokenStream, fld: &mut T) -> TokenStream {
-    tts.map(|tt| fld.fold_tt(tt))
-}
-
-// apply ident folder if it's an ident, apply other folds to interpolated nodes
-pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
-    match t {
-        token::Ident(id, is_raw) => token::Ident(fld.fold_ident(id), is_raw),
-        token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
-        token::Interpolated(nt) => {
-            let nt = match Lrc::try_unwrap(nt) {
-                Ok(nt) => nt,
-                Err(nt) => (*nt).clone(),
-            };
-            Token::interpolated(fld.fold_interpolated(nt.0))
-        }
-        _ => t
-    }
-}
-
-/// apply folder to elements of interpolated nodes
-//
-// N.B., this can occur only when applying a fold to partially expanded code, where
-// parsed pieces have gotten implanted ito *other* macro invocations. This is relevant
-// for macro hygiene, but possibly not elsewhere.
-//
-// One problem here occurs because the types for fold_item, fold_stmt, etc. allow the
-// folder to return *multiple* items; this is a problem for the nodes here, because
-// they insist on having exactly one piece. One solution would be to mangle the fold
-// trait to include one-to-many and one-to-one versions of these entry points, but that
-// would probably confuse a lot of people and help very few. Instead, I'm just going
-// to put in dynamic checks. I think the performance impact of this will be pretty much
-// nonexistent. The danger is that someone will apply a fold to a partially expanded
-// node, and will be confused by the fact that their "fold_item" or "fold_stmt" isn't
-// getting called on NtItem or NtStmt nodes. Hopefully they'll wind up reading this
-// comment, and doing something appropriate.
-//
-// BTW, design choice: I considered just changing the type of, e.g., NtItem to contain
-// multiple items, but decided against it when I looked at parse_item_or_view_item and
-// tried to figure out what I would do with multiple items there....
-pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
-                                         -> token::Nonterminal {
-    match nt {
-        token::NtItem(item) =>
-            token::NtItem(fld.fold_item(item)
-                          // this is probably okay, because the only folds likely
-                          // to peek inside interpolated nodes will be renamings/markings,
-                          // which map single items to single items
-                          .expect_one("expected fold to produce exactly one item")),
-        token::NtBlock(block) => token::NtBlock(fld.fold_block(block)),
-        token::NtStmt(stmt) =>
-            token::NtStmt(fld.fold_stmt(stmt)
-                          // this is probably okay, because the only folds likely
-                          // to peek inside interpolated nodes will be renamings/markings,
-                          // which map single items to single items
-                          .expect_one("expected fold to produce exactly one statement")),
-        token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
-        token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
-        token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
-        token::NtIdent(ident, is_raw) => token::NtIdent(fld.fold_ident(ident), is_raw),
-        token::NtLifetime(ident) => token::NtLifetime(fld.fold_ident(ident)),
-        token::NtLiteral(expr) => token::NtLiteral(fld.fold_expr(expr)),
-        token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)),
-        token::NtPath(path) => token::NtPath(fld.fold_path(path)),
-        token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),
-        token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
-        token::NtImplItem(item) =>
-            token::NtImplItem(fld.fold_impl_item(item)
-                              .expect_one("expected fold to produce exactly one item")),
-        token::NtTraitItem(item) =>
-            token::NtTraitItem(fld.fold_trait_item(item)
-                               .expect_one("expected fold to produce exactly one item")),
-        token::NtGenerics(generics) => token::NtGenerics(fld.fold_generics(generics)),
-        token::NtWhereClause(where_clause) =>
-            token::NtWhereClause(fld.fold_where_clause(where_clause)),
-        token::NtArg(arg) => token::NtArg(fld.fold_arg(arg)),
-        token::NtVis(vis) => token::NtVis(fld.fold_vis(vis)),
-        token::NtForeignItem(ni) =>
-            token::NtForeignItem(fld.fold_foreign_item(ni)
-                                 // see reasoning above
-                                 .expect_one("expected fold to produce exactly one item")),
-    }
-}
-
-pub fn noop_fold_asyncness<T: Folder>(asyncness: IsAsync, fld: &mut T) -> IsAsync {
-    match asyncness {
-        IsAsync::Async { closure_id, return_impl_trait_id } => IsAsync::Async {
-            closure_id: fld.new_id(closure_id),
-            return_impl_trait_id: fld.new_id(return_impl_trait_id),
-        },
-        IsAsync::NotAsync => IsAsync::NotAsync,
-    }
-}
-
-pub fn noop_fold_fn_decl<T: Folder>(decl: P<FnDecl>, fld: &mut T) -> P<FnDecl> {
-    decl.map(|FnDecl {inputs, output, variadic}| FnDecl {
-        inputs: inputs.move_map(|x| fld.fold_arg(x)),
-        output: match output {
-            FunctionRetTy::Ty(ty) => FunctionRetTy::Ty(fld.fold_ty(ty)),
-            FunctionRetTy::Default(span) => FunctionRetTy::Default(fld.new_span(span)),
-        },
-        variadic,
-    })
-}
-
-pub fn noop_fold_param_bound<T>(pb: GenericBound, fld: &mut T) -> GenericBound where T: Folder {
-    match pb {
-        GenericBound::Trait(ty, modifier) => {
-            GenericBound::Trait(fld.fold_poly_trait_ref(ty), modifier)
-        }
-        GenericBound::Outlives(lifetime) => {
-            GenericBound::Outlives(noop_fold_lifetime(lifetime, fld))
-        }
-    }
-}
-
-pub fn noop_fold_generic_param<T: Folder>(param: GenericParam, fld: &mut T) -> GenericParam {
-    let attrs: Vec<_> = param.attrs.into();
-    GenericParam {
-        ident: fld.fold_ident(param.ident),
-        id: fld.new_id(param.id),
-        attrs: attrs.into_iter()
-                    .flat_map(|x| fld.fold_attribute(x).into_iter())
-                    .collect::<Vec<_>>()
-                    .into(),
-        bounds: param.bounds.move_map(|l| noop_fold_param_bound(l, fld)),
-        kind: match param.kind {
-            GenericParamKind::Lifetime => GenericParamKind::Lifetime,
-            GenericParamKind::Type { default } => GenericParamKind::Type {
-                default: default.map(|ty| fld.fold_ty(ty))
-            }
-        }
-    }
-}
-
-pub fn noop_fold_generic_params<T: Folder>(
-    params: Vec<GenericParam>,
-    fld: &mut T
-) -> Vec<GenericParam> {
-    params.move_map(|p| fld.fold_generic_param(p))
-}
-
-pub fn noop_fold_label<T: Folder>(label: Label, fld: &mut T) -> Label {
-    Label {
-        ident: fld.fold_ident(label.ident),
-    }
-}
-
-fn noop_fold_lifetime<T: Folder>(l: Lifetime, fld: &mut T) -> Lifetime {
-    Lifetime {
-        id: fld.new_id(l.id),
-        ident: fld.fold_ident(l.ident),
-    }
-}
-
-pub fn noop_fold_generics<T: Folder>(Generics { params, where_clause, span }: Generics,
-                                     fld: &mut T) -> Generics {
-    Generics {
-        params: fld.fold_generic_params(params),
-        where_clause: fld.fold_where_clause(where_clause),
-        span: fld.new_span(span),
-    }
-}
-
-pub fn noop_fold_where_clause<T: Folder>(
-                              WhereClause {id, predicates, span}: WhereClause,
-                              fld: &mut T)
-                              -> WhereClause {
-    WhereClause {
-        id: fld.new_id(id),
-        predicates: predicates.move_map(|predicate| {
-            fld.fold_where_predicate(predicate)
-        }),
-        span,
-    }
-}
-
-pub fn noop_fold_where_predicate<T: Folder>(
-                                 pred: WherePredicate,
-                                 fld: &mut T)
-                                 -> WherePredicate {
-    match pred {
-        ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{bound_generic_params,
-                                                                     bounded_ty,
-                                                                     bounds,
-                                                                     span}) => {
-            ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
-                bound_generic_params: fld.fold_generic_params(bound_generic_params),
-                bounded_ty: fld.fold_ty(bounded_ty),
-                bounds: bounds.move_map(|x| fld.fold_param_bound(x)),
-                span: fld.new_span(span)
-            })
-        }
-        ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{lifetime,
-                                                                       bounds,
-                                                                       span}) => {
-            ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate {
-                span: fld.new_span(span),
-                lifetime: noop_fold_lifetime(lifetime, fld),
-                bounds: bounds.move_map(|bound| noop_fold_param_bound(bound, fld))
-            })
-        }
-        ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{id,
-                                                               lhs_ty,
-                                                               rhs_ty,
-                                                               span}) => {
-            ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{
-                id: fld.new_id(id),
-                lhs_ty: fld.fold_ty(lhs_ty),
-                rhs_ty: fld.fold_ty(rhs_ty),
-                span: fld.new_span(span)
-            })
-        }
-    }
-}
-
-pub fn noop_fold_variant_data<T: Folder>(vdata: VariantData, fld: &mut T) -> VariantData {
-    match vdata {
-        ast::VariantData::Struct(fields, id) => {
-            ast::VariantData::Struct(fields.move_map(|f| fld.fold_struct_field(f)),
-                                     fld.new_id(id))
-        }
-        ast::VariantData::Tuple(fields, id) => {
-            ast::VariantData::Tuple(fields.move_map(|f| fld.fold_struct_field(f)),
-                                    fld.new_id(id))
-        }
-        ast::VariantData::Unit(id) => ast::VariantData::Unit(fld.new_id(id))
-    }
-}
-
-pub fn noop_fold_trait_ref<T: Folder>(p: TraitRef, fld: &mut T) -> TraitRef {
-    let id = fld.new_id(p.ref_id);
-    let TraitRef {
-        path,
-        ref_id: _,
-    } = p;
-    ast::TraitRef {
-        path: fld.fold_path(path),
-        ref_id: id,
-    }
-}
-
-pub fn noop_fold_poly_trait_ref<T: Folder>(p: PolyTraitRef, fld: &mut T) -> PolyTraitRef {
-    ast::PolyTraitRef {
-        bound_generic_params: fld.fold_generic_params(p.bound_generic_params),
-        trait_ref: fld.fold_trait_ref(p.trait_ref),
-        span: fld.new_span(p.span),
-    }
-}
-
-pub fn noop_fold_struct_field<T: Folder>(f: StructField, fld: &mut T) -> StructField {
-    StructField {
-        span: fld.new_span(f.span),
-        id: fld.new_id(f.id),
-        ident: f.ident.map(|ident| fld.fold_ident(ident)),
-        vis: fld.fold_vis(f.vis),
-        ty: fld.fold_ty(f.ty),
-        attrs: fold_attrs(f.attrs, fld),
-    }
-}
-
-pub fn noop_fold_field<T: Folder>(f: Field, folder: &mut T) -> Field {
-    Field {
-        ident: folder.fold_ident(f.ident),
-        expr: folder.fold_expr(f.expr),
-        span: folder.new_span(f.span),
-        is_shorthand: f.is_shorthand,
-        attrs: fold_thin_attrs(f.attrs, folder),
-    }
-}
-
-pub fn noop_fold_mt<T: Folder>(MutTy {ty, mutbl}: MutTy, folder: &mut T) -> MutTy {
-    MutTy {
-        ty: folder.fold_ty(ty),
-        mutbl,
-    }
-}
-
-pub fn noop_fold_opt_bounds<T: Folder>(b: Option<GenericBounds>, folder: &mut T)
-                                       -> Option<GenericBounds> {
-    b.map(|bounds| folder.fold_bounds(bounds))
-}
-
-fn noop_fold_bounds<T: Folder>(bounds: GenericBounds, folder: &mut T)
-                          -> GenericBounds {
-    bounds.move_map(|bound| folder.fold_param_bound(bound))
-}
-
-pub fn noop_fold_block<T: Folder>(b: P<Block>, folder: &mut T) -> P<Block> {
-    b.map(|Block {id, stmts, rules, span}| Block {
-        id: folder.new_id(id),
-        stmts: stmts.move_flat_map(|s| folder.fold_stmt(s).into_iter()),
-        rules,
-        span: folder.new_span(span),
-    })
-}
-
-pub fn noop_fold_item_kind<T: Folder>(i: ItemKind, folder: &mut T) -> ItemKind {
-    match i {
-        ItemKind::ExternCrate(orig_name) => ItemKind::ExternCrate(orig_name),
-        ItemKind::Use(use_tree) => {
-            ItemKind::Use(use_tree.map(|tree| folder.fold_use_tree(tree)))
-        }
-        ItemKind::Static(t, m, e) => {
-            ItemKind::Static(folder.fold_ty(t), m, folder.fold_expr(e))
-        }
-        ItemKind::Const(t, e) => {
-            ItemKind::Const(folder.fold_ty(t), folder.fold_expr(e))
-        }
-        ItemKind::Fn(decl, header, generics, body) => {
-            let generics = folder.fold_generics(generics);
-            let header = folder.fold_fn_header(header);
-            let decl = folder.fold_fn_decl(decl);
-            let body = folder.fold_block(body);
-            ItemKind::Fn(decl, header, generics, body)
-        }
-        ItemKind::Mod(m) => ItemKind::Mod(folder.fold_mod(m)),
-        ItemKind::ForeignMod(nm) => ItemKind::ForeignMod(folder.fold_foreign_mod(nm)),
-        ItemKind::GlobalAsm(ga) => ItemKind::GlobalAsm(folder.fold_global_asm(ga)),
-        ItemKind::Ty(t, generics) => {
-            ItemKind::Ty(folder.fold_ty(t), folder.fold_generics(generics))
-        }
-        ItemKind::Existential(bounds, generics) => ItemKind::Existential(
-            folder.fold_bounds(bounds),
-            folder.fold_generics(generics),
-        ),
-        ItemKind::Enum(enum_definition, generics) => {
-            let generics = folder.fold_generics(generics);
-            let variants = enum_definition.variants.move_map(|x| folder.fold_variant(x));
-            ItemKind::Enum(ast::EnumDef { variants }, generics)
-        }
-        ItemKind::Struct(struct_def, generics) => {
-            let generics = folder.fold_generics(generics);
-            ItemKind::Struct(folder.fold_variant_data(struct_def), generics)
-        }
-        ItemKind::Union(struct_def, generics) => {
-            let generics = folder.fold_generics(generics);
-            ItemKind::Union(folder.fold_variant_data(struct_def), generics)
-        }
-        ItemKind::Impl(unsafety,
-                       polarity,
-                       defaultness,
-                       generics,
-                       ifce,
-                       ty,
-                       impl_items) => ItemKind::Impl(
-            unsafety,
-            polarity,
-            defaultness,
-            folder.fold_generics(generics),
-            ifce.map(|trait_ref| folder.fold_trait_ref(trait_ref)),
-            folder.fold_ty(ty),
-            impl_items.move_flat_map(|item| folder.fold_impl_item(item)),
-        ),
-        ItemKind::Trait(is_auto, unsafety, generics, bounds, items) => ItemKind::Trait(
-            is_auto,
-            unsafety,
-            folder.fold_generics(generics),
-            folder.fold_bounds(bounds),
-            items.move_flat_map(|item| folder.fold_trait_item(item)),
-        ),
-        ItemKind::TraitAlias(generics, bounds) => ItemKind::TraitAlias(
-            folder.fold_generics(generics),
-            folder.fold_bounds(bounds)),
-        ItemKind::Mac(m) => ItemKind::Mac(folder.fold_mac(m)),
-        ItemKind::MacroDef(def) => ItemKind::MacroDef(folder.fold_macro_def(def)),
-    }
-}
-
-pub fn noop_fold_trait_item<T: Folder>(i: TraitItem, folder: &mut T) -> SmallVec<[TraitItem; 1]> {
-    smallvec![TraitItem {
-        id: folder.new_id(i.id),
-        ident: folder.fold_ident(i.ident),
-        attrs: fold_attrs(i.attrs, folder),
-        generics: folder.fold_generics(i.generics),
-        node: match i.node {
-            TraitItemKind::Const(ty, default) => {
-                TraitItemKind::Const(folder.fold_ty(ty),
-                               default.map(|x| folder.fold_expr(x)))
-            }
-            TraitItemKind::Method(sig, body) => {
-                TraitItemKind::Method(noop_fold_method_sig(sig, folder),
-                                body.map(|x| folder.fold_block(x)))
-            }
-            TraitItemKind::Type(bounds, default) => {
-                TraitItemKind::Type(folder.fold_bounds(bounds),
-                              default.map(|x| folder.fold_ty(x)))
-            }
-            ast::TraitItemKind::Macro(mac) => {
-                TraitItemKind::Macro(folder.fold_mac(mac))
-            }
-        },
-        span: folder.new_span(i.span),
-        tokens: i.tokens,
-    }]
-}
-
-pub fn noop_fold_impl_item<T: Folder>(i: ImplItem, folder: &mut T)-> SmallVec<[ImplItem; 1]> {
-    smallvec![ImplItem {
-        id: folder.new_id(i.id),
-        vis: folder.fold_vis(i.vis),
-        ident: folder.fold_ident(i.ident),
-        attrs: fold_attrs(i.attrs, folder),
-        generics: folder.fold_generics(i.generics),
-        defaultness: i.defaultness,
-        node: match i.node  {
-            ast::ImplItemKind::Const(ty, expr) => {
-                ast::ImplItemKind::Const(folder.fold_ty(ty), folder.fold_expr(expr))
-            }
-            ast::ImplItemKind::Method(sig, body) => {
-                ast::ImplItemKind::Method(noop_fold_method_sig(sig, folder),
-                               folder.fold_block(body))
-            }
-            ast::ImplItemKind::Type(ty) => ast::ImplItemKind::Type(folder.fold_ty(ty)),
-            ast::ImplItemKind::Existential(bounds) => {
-                ast::ImplItemKind::Existential(folder.fold_bounds(bounds))
-            },
-            ast::ImplItemKind::Macro(mac) => ast::ImplItemKind::Macro(folder.fold_mac(mac))
-        },
-        span: folder.new_span(i.span),
-        tokens: i.tokens,
-    }]
-}
-
-pub fn noop_fold_fn_header<T: Folder>(mut header: FnHeader, folder: &mut T) -> FnHeader {
-    header.asyncness = folder.fold_asyncness(header.asyncness);
-    header
-}
-
-pub fn noop_fold_mod<T: Folder>(Mod {inner, items, inline}: Mod, folder: &mut T) -> Mod {
-    Mod {
-        inner: folder.new_span(inner),
-        items: items.move_flat_map(|x| folder.fold_item(x)),
-        inline: inline,
-    }
-}
-
-pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, span}: Crate,
-                                  folder: &mut T) -> Crate {
-    let mut items = folder.fold_item(P(ast::Item {
-        ident: keywords::Invalid.ident(),
-        attrs,
-        id: ast::DUMMY_NODE_ID,
-        vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Public),
-        span,
-        node: ast::ItemKind::Mod(module),
-        tokens: None,
-    })).into_iter();
-
-    let (module, attrs, span) = match items.next() {
-        Some(item) => {
-            assert!(items.next().is_none(),
-                    "a crate cannot expand to more than one item");
-            item.and_then(|ast::Item { attrs, span, node, .. }| {
-                match node {
-                    ast::ItemKind::Mod(m) => (m, attrs, span),
-                    _ => panic!("fold converted a module to not a module"),
-                }
-            })
-        }
-        None => (ast::Mod {
-            inner: span,
-            items: vec![],
-            inline: true,
-        }, vec![], span)
-    };
-
-    Crate {
-        module,
-        attrs,
-        span,
-    }
-}
-
-// fold one item into possibly many items
-pub fn noop_fold_item<T: Folder>(i: P<Item>, folder: &mut T) -> SmallVec<[P<Item>; 1]> {
-    smallvec![i.map(|i| folder.fold_item_simple(i))]
-}
-
-// fold one item into exactly one item
-pub fn noop_fold_item_simple<T: Folder>(Item {id, ident, attrs, node, vis, span, tokens}: Item,
-                                        folder: &mut T) -> Item {
-    Item {
-        id: folder.new_id(id),
-        vis: folder.fold_vis(vis),
-        ident: folder.fold_ident(ident),
-        attrs: fold_attrs(attrs, folder),
-        node: folder.fold_item_kind(node),
-        span: folder.new_span(span),
-
-        // FIXME: if this is replaced with a call to `folder.fold_tts` it causes
-        //        an ICE during resolve... odd!
-        tokens,
-    }
-}
-
-pub fn noop_fold_foreign_item<T: Folder>(ni: ForeignItem, folder: &mut T)
-    -> SmallVec<[ForeignItem; 1]>
-{
-    smallvec![folder.fold_foreign_item_simple(ni)]
-}
-
-pub fn noop_fold_foreign_item_simple<T: Folder>(ni: ForeignItem, folder: &mut T) -> ForeignItem {
-    ForeignItem {
-        id: folder.new_id(ni.id),
-        vis: folder.fold_vis(ni.vis),
-        ident: folder.fold_ident(ni.ident),
-        attrs: fold_attrs(ni.attrs, folder),
-        node: match ni.node {
-            ForeignItemKind::Fn(fdec, generics) => {
-                ForeignItemKind::Fn(folder.fold_fn_decl(fdec), folder.fold_generics(generics))
-            }
-            ForeignItemKind::Static(t, m) => {
-                ForeignItemKind::Static(folder.fold_ty(t), m)
-            }
-            ForeignItemKind::Ty => ForeignItemKind::Ty,
-            ForeignItemKind::Macro(mac) => ForeignItemKind::Macro(folder.fold_mac(mac)),
-        },
-        span: folder.new_span(ni.span)
-    }
-}
-
-pub fn noop_fold_method_sig<T: Folder>(sig: MethodSig, folder: &mut T) -> MethodSig {
-    MethodSig {
-        header: folder.fold_fn_header(sig.header),
-        decl: folder.fold_fn_decl(sig.decl)
-    }
-}
-
-pub fn noop_fold_pat<T: Folder>(p: P<Pat>, folder: &mut T) -> P<Pat> {
-    p.map(|Pat {id, node, span}| Pat {
-        id: folder.new_id(id),
-        node: match node {
-            PatKind::Wild => PatKind::Wild,
-            PatKind::Ident(binding_mode, ident, sub) => {
-                PatKind::Ident(binding_mode,
-                               folder.fold_ident(ident),
-                               sub.map(|x| folder.fold_pat(x)))
-            }
-            PatKind::Lit(e) => PatKind::Lit(folder.fold_expr(e)),
-            PatKind::TupleStruct(pth, pats, ddpos) => {
-                PatKind::TupleStruct(folder.fold_path(pth),
-                        pats.move_map(|x| folder.fold_pat(x)), ddpos)
-            }
-            PatKind::Path(qself, pth) => {
-                let (qself, pth) = folder.fold_qpath(qself, pth);
-                PatKind::Path(qself, pth)
-            }
-            PatKind::Struct(pth, fields, etc) => {
-                let pth = folder.fold_path(pth);
-                let fs = fields.move_map(|f| {
-                    Spanned { span: folder.new_span(f.span),
-                              node: ast::FieldPat {
-                                  ident: folder.fold_ident(f.node.ident),
-                                  pat: folder.fold_pat(f.node.pat),
-                                  is_shorthand: f.node.is_shorthand,
-                                  attrs: fold_attrs(f.node.attrs.into(), folder).into()
-                              }}
-                });
-                PatKind::Struct(pth, fs, etc)
-            }
-            PatKind::Tuple(elts, ddpos) => {
-                PatKind::Tuple(elts.move_map(|x| folder.fold_pat(x)), ddpos)
-            }
-            PatKind::Box(inner) => PatKind::Box(folder.fold_pat(inner)),
-            PatKind::Ref(inner, mutbl) => PatKind::Ref(folder.fold_pat(inner), mutbl),
-            PatKind::Range(e1, e2, Spanned { span, node: end }) => {
-                PatKind::Range(folder.fold_expr(e1),
-                               folder.fold_expr(e2),
-                               Spanned { span, node: folder.fold_range_end(end) })
-            },
-            PatKind::Slice(before, slice, after) => {
-                PatKind::Slice(before.move_map(|x| folder.fold_pat(x)),
-                       slice.map(|x| folder.fold_pat(x)),
-                       after.move_map(|x| folder.fold_pat(x)))
-            }
-            PatKind::Paren(inner) => PatKind::Paren(folder.fold_pat(inner)),
-            PatKind::Mac(mac) => PatKind::Mac(folder.fold_mac(mac))
-        },
-        span: folder.new_span(span)
-    })
-}
-
-pub fn noop_fold_range_end<T: Folder>(end: RangeEnd, _folder: &mut T) -> RangeEnd {
-    end
-}
-
-pub fn noop_fold_anon_const<T: Folder>(constant: AnonConst, folder: &mut T) -> AnonConst {
-    let AnonConst {id, value} = constant;
-    AnonConst {
-        id: folder.new_id(id),
-        value: folder.fold_expr(value),
-    }
-}
-
-pub fn noop_fold_expr<T: Folder>(Expr {id, node, span, attrs}: Expr, folder: &mut T) -> Expr {
-    Expr {
-        node: match node {
-            ExprKind::Box(e) => {
-                ExprKind::Box(folder.fold_expr(e))
-            }
-            ExprKind::ObsoleteInPlace(a, b) => {
-                ExprKind::ObsoleteInPlace(folder.fold_expr(a), folder.fold_expr(b))
-            }
-            ExprKind::Array(exprs) => {
-                ExprKind::Array(folder.fold_exprs(exprs))
-            }
-            ExprKind::Repeat(expr, count) => {
-                ExprKind::Repeat(folder.fold_expr(expr), folder.fold_anon_const(count))
-            }
-            ExprKind::Tup(exprs) => ExprKind::Tup(folder.fold_exprs(exprs)),
-            ExprKind::Call(f, args) => {
-                ExprKind::Call(folder.fold_expr(f),
-                         folder.fold_exprs(args))
-            }
-            ExprKind::MethodCall(seg, args) => {
-                ExprKind::MethodCall(
-                    PathSegment {
-                        ident: folder.fold_ident(seg.ident),
-                        id: folder.new_id(seg.id),
-                        args: seg.args.map(|args| {
-                            args.map(|args| folder.fold_generic_args(args))
-                        }),
-                    },
-                    folder.fold_exprs(args))
-            }
-            ExprKind::Binary(binop, lhs, rhs) => {
-                ExprKind::Binary(binop,
-                        folder.fold_expr(lhs),
-                        folder.fold_expr(rhs))
-            }
-            ExprKind::Unary(binop, ohs) => {
-                ExprKind::Unary(binop, folder.fold_expr(ohs))
-            }
-            ExprKind::Lit(l) => ExprKind::Lit(l),
-            ExprKind::Cast(expr, ty) => {
-                ExprKind::Cast(folder.fold_expr(expr), folder.fold_ty(ty))
-            }
-            ExprKind::Type(expr, ty) => {
-                ExprKind::Type(folder.fold_expr(expr), folder.fold_ty(ty))
-            }
-            ExprKind::AddrOf(m, ohs) => ExprKind::AddrOf(m, folder.fold_expr(ohs)),
-            ExprKind::If(cond, tr, fl) => {
-                ExprKind::If(folder.fold_expr(cond),
-                       folder.fold_block(tr),
-                       fl.map(|x| folder.fold_expr(x)))
-            }
-            ExprKind::IfLet(pats, expr, tr, fl) => {
-                ExprKind::IfLet(pats.move_map(|pat| folder.fold_pat(pat)),
-                          folder.fold_expr(expr),
-                          folder.fold_block(tr),
-                          fl.map(|x| folder.fold_expr(x)))
-            }
-            ExprKind::While(cond, body, opt_label) => {
-                ExprKind::While(folder.fold_expr(cond),
-                          folder.fold_block(body),
-                          opt_label.map(|label| folder.fold_label(label)))
-            }
-            ExprKind::WhileLet(pats, expr, body, opt_label) => {
-                ExprKind::WhileLet(pats.move_map(|pat| folder.fold_pat(pat)),
-                             folder.fold_expr(expr),
-                             folder.fold_block(body),
-                             opt_label.map(|label| folder.fold_label(label)))
-            }
-            ExprKind::ForLoop(pat, iter, body, opt_label) => {
-                ExprKind::ForLoop(folder.fold_pat(pat),
-                            folder.fold_expr(iter),
-                            folder.fold_block(body),
-                            opt_label.map(|label| folder.fold_label(label)))
-            }
-            ExprKind::Loop(body, opt_label) => {
-                ExprKind::Loop(folder.fold_block(body),
-                               opt_label.map(|label| folder.fold_label(label)))
-            }
-            ExprKind::Match(expr, arms) => {
-                ExprKind::Match(folder.fold_expr(expr),
-                          arms.move_map(|x| folder.fold_arm(x)))
-            }
-            ExprKind::Closure(capture_clause, asyncness, movability, decl, body, span) => {
-                ExprKind::Closure(capture_clause,
-                                  folder.fold_asyncness(asyncness),
-                                  movability,
-                                  folder.fold_fn_decl(decl),
-                                  folder.fold_expr(body),
-                                  folder.new_span(span))
-            }
-            ExprKind::Block(blk, opt_label) => {
-                ExprKind::Block(folder.fold_block(blk),
-                                opt_label.map(|label| folder.fold_label(label)))
-            }
-            ExprKind::Async(capture_clause, node_id, body) => {
-                ExprKind::Async(
-                    capture_clause,
-                    folder.new_id(node_id),
-                    folder.fold_block(body),
-                )
-            }
-            ExprKind::Assign(el, er) => {
-                ExprKind::Assign(folder.fold_expr(el), folder.fold_expr(er))
-            }
-            ExprKind::AssignOp(op, el, er) => {
-                ExprKind::AssignOp(op,
-                            folder.fold_expr(el),
-                            folder.fold_expr(er))
-            }
-            ExprKind::Field(el, ident) => {
-                ExprKind::Field(folder.fold_expr(el), folder.fold_ident(ident))
-            }
-            ExprKind::Index(el, er) => {
-                ExprKind::Index(folder.fold_expr(el), folder.fold_expr(er))
-            }
-            ExprKind::Range(e1, e2, lim) => {
-                ExprKind::Range(e1.map(|x| folder.fold_expr(x)),
-                                e2.map(|x| folder.fold_expr(x)),
-                                lim)
-            }
-            ExprKind::Path(qself, path) => {
-                let (qself, path) = folder.fold_qpath(qself, path);
-                ExprKind::Path(qself, path)
-            }
-            ExprKind::Break(opt_label, opt_expr) => {
-                ExprKind::Break(opt_label.map(|label| folder.fold_label(label)),
-                                opt_expr.map(|e| folder.fold_expr(e)))
-            }
-            ExprKind::Continue(opt_label) => {
-                ExprKind::Continue(opt_label.map(|label| folder.fold_label(label)))
-            }
-            ExprKind::Ret(e) => ExprKind::Ret(e.map(|x| folder.fold_expr(x))),
-            ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(asm.map(|asm| {
-                InlineAsm {
-                    inputs: asm.inputs.move_map(|(c, input)| {
-                        (c, folder.fold_expr(input))
-                    }),
-                    outputs: asm.outputs.move_map(|out| {
-                        InlineAsmOutput {
-                            constraint: out.constraint,
-                            expr: folder.fold_expr(out.expr),
-                            is_rw: out.is_rw,
-                            is_indirect: out.is_indirect,
-                        }
-                    }),
-                    ..asm
-                }
-            })),
-            ExprKind::Mac(mac) => ExprKind::Mac(folder.fold_mac(mac)),
-            ExprKind::Struct(path, fields, maybe_expr) => {
-                ExprKind::Struct(folder.fold_path(path),
-                        fields.move_map(|x| folder.fold_field(x)),
-                        maybe_expr.map(|x| folder.fold_expr(x)))
-            },
-            ExprKind::Paren(ex) => {
-                let sub_expr = folder.fold_expr(ex);
-                return Expr {
-                    // Nodes that are equal modulo `Paren` sugar no-ops should have the same ids.
-                    id: sub_expr.id,
-                    node: ExprKind::Paren(sub_expr),
-                    span: folder.new_span(span),
-                    attrs: fold_attrs(attrs.into(), folder).into(),
-                };
-            }
-            ExprKind::Yield(ex) => ExprKind::Yield(ex.map(|x| folder.fold_expr(x))),
-            ExprKind::Try(ex) => ExprKind::Try(folder.fold_expr(ex)),
-            ExprKind::TryBlock(body) => ExprKind::TryBlock(folder.fold_block(body)),
-            ExprKind::Err => ExprKind::Err,
-        },
-        id: folder.new_id(id),
-        span: folder.new_span(span),
-        attrs: fold_attrs(attrs.into(), folder).into(),
-    }
-}
-
-pub fn noop_fold_opt_expr<T: Folder>(e: P<Expr>, folder: &mut T) -> Option<P<Expr>> {
-    Some(folder.fold_expr(e))
-}
-
-pub fn noop_fold_exprs<T: Folder>(es: Vec<P<Expr>>, folder: &mut T) -> Vec<P<Expr>> {
-    es.move_flat_map(|e| folder.fold_opt_expr(e))
-}
-
-pub fn noop_fold_stmt<T: Folder>(Stmt {node, span, id}: Stmt, folder: &mut T) -> SmallVec<[Stmt; 1]>
-{
-    let id = folder.new_id(id);
-    let span = folder.new_span(span);
-    noop_fold_stmt_kind(node, folder).into_iter().map(|node| {
-        Stmt { id: id, node: node, span: span }
-    }).collect()
-}
-
-pub fn noop_fold_stmt_kind<T: Folder>(node: StmtKind, folder: &mut T) -> SmallVec<[StmtKind; 1]> {
-    match node {
-        StmtKind::Local(local) => smallvec![StmtKind::Local(folder.fold_local(local))],
-        StmtKind::Item(item) => folder.fold_item(item).into_iter().map(StmtKind::Item).collect(),
-        StmtKind::Expr(expr) => {
-            folder.fold_opt_expr(expr).into_iter().map(StmtKind::Expr).collect()
-        }
-        StmtKind::Semi(expr) => {
-            folder.fold_opt_expr(expr).into_iter().map(StmtKind::Semi).collect()
-        }
-        StmtKind::Mac(mac) => smallvec![StmtKind::Mac(mac.map(|(mac, semi, attrs)| {
-            (folder.fold_mac(mac), semi, fold_attrs(attrs.into(), folder).into())
-        }))],
-    }
-}
-
-pub fn noop_fold_vis<T: Folder>(vis: Visibility, folder: &mut T) -> Visibility {
-    match vis.node {
-        VisibilityKind::Restricted { path, id } => {
-            respan(vis.span, VisibilityKind::Restricted {
-                path: path.map(|path| folder.fold_path(path)),
-                id: folder.new_id(id),
-            })
-        }
-        _ => vis,
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use std::io;
-    use ast::{self, Ident};
-    use util::parser_testing::{string_to_crate, matches_codepattern};
-    use print::pprust;
-    use fold;
-    use with_globals;
-    use super::*;
-
-    // this version doesn't care about getting comments or docstrings in.
-    fn fake_print_crate(s: &mut pprust::State,
-                        krate: &ast::Crate) -> io::Result<()> {
-        s.print_mod(&krate.module, &krate.attrs)
-    }
-
-    // change every identifier to "zz"
-    struct ToZzIdentFolder;
-
-    impl Folder for ToZzIdentFolder {
-        fn fold_ident(&mut self, _: ast::Ident) -> ast::Ident {
-            Ident::from_str("zz")
-        }
-        fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
-            fold::noop_fold_mac(mac, self)
-        }
-    }
-
-    // maybe add to expand.rs...
-    macro_rules! assert_pred {
-        ($pred:expr, $predname:expr, $a:expr , $b:expr) => (
-            {
-                let pred_val = $pred;
-                let a_val = $a;
-                let b_val = $b;
-                if !(pred_val(&a_val, &b_val)) {
-                    panic!("expected args satisfying {}, got {} and {}",
-                          $predname, a_val, b_val);
-                }
-            }
-        )
-    }
-
-    // make sure idents get transformed everywhere
-    #[test] fn ident_transformation () {
-        with_globals(|| {
-            let mut zz_fold = ToZzIdentFolder;
-            let ast = string_to_crate(
-                "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string());
-            let folded_crate = zz_fold.fold_crate(ast);
-            assert_pred!(
-                matches_codepattern,
-                "matches_codepattern",
-                pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
-                "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string());
-        })
-    }
-
-    // even inside macro defs....
-    #[test] fn ident_transformation_in_defs () {
-        with_globals(|| {
-            let mut zz_fold = ToZzIdentFolder;
-            let ast = string_to_crate(
-                "macro_rules! a {(b $c:expr $(d $e:token)f+ => \
-                (g $(d $d $e)+))} ".to_string());
-            let folded_crate = zz_fold.fold_crate(ast);
-            assert_pred!(
-                matches_codepattern,
-                "matches_codepattern",
-                pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
-                "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
-        })
-    }
-}
diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs
index cf11ac550b7..af785050532 100644
--- a/src/libsyntax/json.rs
+++ b/src/libsyntax/json.rs
@@ -9,13 +9,13 @@
 
 // FIXME: spec the JSON output properly.
 
-use source_map::{SourceMap, FilePathMapping};
-use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
-use errors::registry::Registry;
-use errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, SourceMapper};
-use errors::{DiagnosticId, Applicability};
-use errors::emitter::{Emitter, EmitterWriter};
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::registry::Registry;
+use crate::errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, SourceMapper};
+use crate::errors::{DiagnosticId, Applicability};
+use crate::errors::emitter::{Emitter, EmitterWriter};
 
+use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
 use rustc_data_structures::sync::{self, Lrc};
 use std::io::{self, Write};
 use std::vec;
@@ -69,7 +69,7 @@ impl JsonEmitter {
 }
 
 impl Emitter for JsonEmitter {
-    fn emit(&mut self, db: &DiagnosticBuilder) {
+    fn emit(&mut self, db: &DiagnosticBuilder<'_>) {
         let data = Diagnostic::from_diagnostic_builder(db, self);
         let result = if self.pretty {
             writeln!(&mut self.dst, "{}", as_pretty_json(&data))
@@ -159,7 +159,7 @@ struct DiagnosticCode {
 }
 
 impl Diagnostic {
-    fn from_diagnostic_builder(db: &DiagnosticBuilder,
+    fn from_diagnostic_builder(db: &DiagnosticBuilder<'_>,
                                je: &JsonEmitter)
                                -> Diagnostic {
         let sugg = db.suggestions.iter().map(|sugg| {
@@ -342,7 +342,7 @@ impl DiagnosticSpanLine {
         }
     }
 
-    /// Create a list of DiagnosticSpanLines from span - each line with any part
+    /// Creates a list of DiagnosticSpanLines from span - each line with any part
     /// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
     /// `span` within the line.
     fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index b2a3ae7f9d9..a56cdf623bf 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -4,38 +4,27 @@
 //!
 //! This API is completely unstable and subject to change.
 
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
-       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
-       html_root_url = "https://doc.rust-lang.org/nightly/",
+#![doc(html_root_url = "https://doc.rust-lang.org/nightly/",
        test(attr(deny(warnings))))]
 
+#![deny(rust_2018_idioms)]
+
 #![feature(crate_visibility_modifier)]
 #![feature(label_break_value)]
 #![feature(nll)]
 #![feature(rustc_attrs)]
 #![feature(rustc_diagnostic_macros)]
 #![feature(slice_sort_by_cached_key)]
-#![feature(str_escape)]
 #![feature(step_trait)]
 #![feature(try_trait)]
 #![feature(unicode_internals)]
 
 #![recursion_limit="256"]
 
-#[macro_use] extern crate bitflags;
-extern crate core;
-extern crate serialize;
-#[macro_use] extern crate log;
-pub extern crate rustc_errors as errors;
-extern crate syntax_pos;
-#[macro_use] extern crate rustc_data_structures;
-extern crate rustc_target;
-#[macro_use] extern crate scoped_tls;
-#[macro_use]
-extern crate smallvec;
-
+#[allow(unused_extern_crates)]
 extern crate serialize as rustc_serialize; // used by deriving
 
+pub use rustc_errors as errors;
 use rustc_data_structures::sync::Lock;
 use rustc_data_structures::bit_set::GrowableBitSet;
 pub use rustc_data_structures::thin_vec::ThinVec;
@@ -48,7 +37,7 @@ use ast::AttrId;
 macro_rules! panictry {
     ($e:expr) => ({
         use std::result::Result::{Ok, Err};
-        use errors::FatalError;
+        use crate::errors::FatalError;
         match $e {
             Ok(e) => e,
             Err(mut e) => {
@@ -63,7 +52,7 @@ macro_rules! panictry {
 macro_rules! panictry_buffer {
     ($handler:expr, $e:expr) => ({
         use std::result::Result::{Ok, Err};
-        use errors::{FatalError, DiagnosticBuilder};
+        use crate::errors::{FatalError, DiagnosticBuilder};
         match $e {
             Ok(e) => e,
             Err(errs) => {
@@ -113,7 +102,7 @@ pub fn with_globals<F, R>(f: F) -> R
     })
 }
 
-scoped_thread_local!(pub static GLOBALS: Globals);
+scoped_tls::scoped_thread_local!(pub static GLOBALS: Globals);
 
 #[macro_use]
 pub mod diagnostics {
@@ -133,15 +122,15 @@ pub mod util {
     pub mod parser;
     #[cfg(test)]
     pub mod parser_testing;
-    pub mod move_map;
+    pub mod map_in_place;
 }
 
 pub mod json;
 
 pub mod syntax {
-    pub use ext;
-    pub use parse;
-    pub use ast;
+    pub use crate::ext;
+    pub use crate::parse;
+    pub use crate::ast;
 }
 
 pub mod ast;
@@ -151,7 +140,7 @@ pub mod source_map;
 pub mod config;
 pub mod entry;
 pub mod feature_gate;
-pub mod fold;
+pub mod mut_visit;
 pub mod parse;
 pub mod ptr;
 pub mod show_span;
diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs
new file mode 100644
index 00000000000..1e5eb0992bd
--- /dev/null
+++ b/src/libsyntax/mut_visit.rs
@@ -0,0 +1,1335 @@
+//! A MutVisitor represents an AST modification; it accepts an AST piece and
+//! and mutates it in place. So, for instance, macro expansion is a MutVisitor
+//! that walks over an AST and modifies it.
+//!
+//! Note: using a MutVisitor (other than the MacroExpander MutVisitor) on
+//! an AST before macro expansion is probably a bad idea. For instance,
+//! a MutVisitor renaming item names in a module will miss all of those
+//! that are created by the expansion of a macro.
+
+use crate::ast::*;
+use crate::source_map::{Spanned, respan};
+use crate::parse::token::{self, Token};
+use crate::ptr::P;
+use crate::symbol::keywords;
+use crate::ThinVec;
+use crate::tokenstream::*;
+use crate::util::map_in_place::MapInPlace;
+
+use smallvec::{smallvec, Array, SmallVec};
+use syntax_pos::Span;
+
+use rustc_data_structures::sync::Lrc;
+use std::ops::DerefMut;
+
+pub trait ExpectOne<A: Array> {
+    fn expect_one(self, err: &'static str) -> A::Item;
+}
+
+impl<A: Array> ExpectOne<A> for SmallVec<A> {
+    fn expect_one(self, err: &'static str) -> A::Item {
+        assert!(self.len() == 1, err);
+        self.into_iter().next().unwrap()
+    }
+}
+
+pub trait MutVisitor: Sized {
+    // Methods in this trait have one of three forms:
+    //
+    //   fn visit_t(&mut self, t: &mut T);                      // common
+    //   fn flat_map_t(&mut self, t: T) -> SmallVec<[T; 1]>;    // rare
+    //   fn filter_map_t(&mut self, t: T) -> Option<T>;         // rarest
+    //
+    // Any additions to this trait should happen in form of a call to a public
+    // `noop_*` function that only calls out to the visitor again, not other
+    // `noop_*` functions. This is a necessary API workaround to the problem of
+    // not being able to call out to the super default method in an overridden
+    // default method.
+    //
+    // When writing these methods, it is better to use destructuring like this:
+    //
+    //   fn visit_abc(&mut self, ABC { a, b, c: _ }: &mut ABC) {
+    //       visit_a(a);
+    //       visit_b(b);
+    //   }
+    //
+    // than to use field access like this:
+    //
+    //   fn visit_abc(&mut self, abc: &mut ABC) {
+    //       visit_a(&mut abc.a);
+    //       visit_b(&mut abc.b);
+    //       // ignore abc.c
+    //   }
+    //
+    // As well as being more concise, the former is explicit about which fields
+    // are skipped. Furthermore, if a new field is added, the destructuring
+    // version will cause a compile error, which is good. In comparison, the
+    // field access version will continue working and it would be easy to
+    // forget to add handling for it.
+
+    fn visit_crate(&mut self, c: &mut Crate) {
+        noop_visit_crate(c, self)
+    }
+
+    fn visit_meta_list_item(&mut self, list_item: &mut NestedMetaItem) {
+        noop_visit_meta_list_item(list_item, self);
+    }
+
+    fn visit_meta_item(&mut self, meta_item: &mut MetaItem) {
+        noop_visit_meta_item(meta_item, self);
+    }
+
+    fn visit_use_tree(&mut self, use_tree: &mut UseTree) {
+        noop_visit_use_tree(use_tree, self);
+    }
+
+    fn flat_map_foreign_item(&mut self, ni: ForeignItem) -> SmallVec<[ForeignItem; 1]> {
+        noop_flat_map_foreign_item(ni, self)
+    }
+
+    fn flat_map_item(&mut self, i: P<Item>) -> SmallVec<[P<Item>; 1]> {
+        noop_flat_map_item(i, self)
+    }
+
+    fn visit_fn_header(&mut self, header: &mut FnHeader) {
+        noop_visit_fn_header(header, self);
+    }
+
+    fn visit_struct_field(&mut self, sf: &mut StructField) {
+        noop_visit_struct_field(sf, self);
+    }
+
+    fn visit_item_kind(&mut self, i: &mut ItemKind) {
+        noop_visit_item_kind(i, self);
+    }
+
+    fn flat_map_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> {
+        noop_flat_map_trait_item(i, self)
+    }
+
+    fn flat_map_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> {
+        noop_flat_map_impl_item(i, self)
+    }
+
+    fn visit_fn_decl(&mut self, d: &mut P<FnDecl>) {
+        noop_visit_fn_decl(d, self);
+    }
+
+    fn visit_asyncness(&mut self, a: &mut IsAsync) {
+        noop_visit_asyncness(a, self);
+    }
+
+    fn visit_block(&mut self, b: &mut P<Block>) {
+        noop_visit_block(b, self);
+    }
+
+    fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> {
+        noop_flat_map_stmt(s, self)
+    }
+
+    fn visit_arm(&mut self, a: &mut Arm) {
+        noop_visit_arm(a, self);
+    }
+
+    fn visit_guard(&mut self, g: &mut Guard) {
+        noop_visit_guard(g, self);
+    }
+
+    fn visit_pat(&mut self, p: &mut P<Pat>) {
+        noop_visit_pat(p, self);
+    }
+
+    fn visit_anon_const(&mut self, c: &mut AnonConst) {
+        noop_visit_anon_const(c, self);
+    }
+
+    fn visit_expr(&mut self, e: &mut P<Expr>) {
+        noop_visit_expr(e, self);
+    }
+
+    fn filter_map_expr(&mut self, e: P<Expr>) -> Option<P<Expr>> {
+        noop_filter_map_expr(e, self)
+    }
+
+    fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
+        noop_visit_generic_arg(arg, self);
+    }
+
+    fn visit_ty(&mut self, t: &mut P<Ty>) {
+        noop_visit_ty(t, self);
+    }
+
+    fn visit_lifetime(&mut self, l: &mut Lifetime) {
+        noop_visit_lifetime(l, self);
+    }
+
+    fn visit_ty_binding(&mut self, t: &mut TypeBinding) {
+        noop_visit_ty_binding(t, self);
+    }
+
+    fn visit_mod(&mut self, m: &mut Mod) {
+        noop_visit_mod(m, self);
+    }
+
+    fn visit_foreign_mod(&mut self, nm: &mut ForeignMod) {
+        noop_visit_foreign_mod(nm, self);
+    }
+
+    fn visit_variant(&mut self, v: &mut Variant) {
+        noop_visit_variant(v, self);
+    }
+
+    fn visit_ident(&mut self, i: &mut Ident) {
+        noop_visit_ident(i, self);
+    }
+
+    fn visit_path(&mut self, p: &mut Path) {
+        noop_visit_path(p, self);
+    }
+
+    fn visit_qself(&mut self, qs: &mut Option<QSelf>) {
+        noop_visit_qself(qs, self);
+    }
+
+    fn visit_generic_args(&mut self, p: &mut GenericArgs) {
+        noop_visit_generic_args(p, self);
+    }
+
+    fn visit_angle_bracketed_parameter_data(&mut self, p: &mut AngleBracketedArgs) {
+        noop_visit_angle_bracketed_parameter_data(p, self);
+    }
+
+    fn visit_parenthesized_parameter_data(&mut self, p: &mut ParenthesizedArgs) {
+        noop_visit_parenthesized_parameter_data(p, self);
+    }
+
+    fn visit_local(&mut self, l: &mut P<Local>) {
+        noop_visit_local(l, self);
+    }
+
+    fn visit_mac(&mut self, _mac: &mut Mac) {
+        panic!("visit_mac disabled by default");
+        // N.B., see note about macros above. If you really want a visitor that
+        // works on macros, use this definition in your trait impl:
+        //   mut_visit::noop_visit_mac(_mac, self);
+    }
+
+    fn visit_macro_def(&mut self, def: &mut MacroDef) {
+        noop_visit_macro_def(def, self);
+    }
+
+    fn visit_label(&mut self, label: &mut Label) {
+        noop_visit_label(label, self);
+    }
+
+    fn visit_attribute(&mut self, at: &mut Attribute) {
+        noop_visit_attribute(at, self);
+    }
+
+    fn visit_arg(&mut self, a: &mut Arg) {
+        noop_visit_arg(a, self);
+    }
+
+    fn visit_generics(&mut self, generics: &mut Generics) {
+        noop_visit_generics(generics, self);
+    }
+
+    fn visit_trait_ref(&mut self, tr: &mut TraitRef) {
+        noop_visit_trait_ref(tr, self);
+    }
+
+    fn visit_poly_trait_ref(&mut self, p: &mut PolyTraitRef) {
+        noop_visit_poly_trait_ref(p, self);
+    }
+
+    fn visit_variant_data(&mut self, vdata: &mut VariantData) {
+        noop_visit_variant_data(vdata, self);
+    }
+
+    fn visit_generic_param(&mut self, param: &mut GenericParam) {
+        noop_visit_generic_param(param, self);
+    }
+
+    fn visit_generic_params(&mut self, params: &mut Vec<GenericParam>) {
+        noop_visit_generic_params(params, self);
+    }
+
+    fn visit_tt(&mut self, tt: &mut TokenTree) {
+        noop_visit_tt(tt, self);
+    }
+
+    fn visit_tts(&mut self, tts: &mut TokenStream) {
+        noop_visit_tts(tts, self);
+    }
+
+    fn visit_token(&mut self, t: &mut Token) {
+        noop_visit_token(t, self);
+    }
+
+    fn visit_interpolated(&mut self, nt: &mut token::Nonterminal) {
+        noop_visit_interpolated(nt, self);
+    }
+
+    fn visit_param_bound(&mut self, tpb: &mut GenericBound) {
+        noop_visit_param_bound(tpb, self);
+    }
+
+    fn visit_mt(&mut self, mt: &mut MutTy) {
+        noop_visit_mt(mt, self);
+    }
+
+    fn visit_field(&mut self, field: &mut Field) {
+        noop_visit_field(field, self);
+    }
+
+    fn visit_where_clause(&mut self, where_clause: &mut WhereClause) {
+        noop_visit_where_clause(where_clause, self);
+    }
+
+    fn visit_where_predicate(&mut self, where_predicate: &mut WherePredicate) {
+        noop_visit_where_predicate(where_predicate, self);
+    }
+
+    fn visit_vis(&mut self, vis: &mut Visibility) {
+        noop_visit_vis(vis, self);
+    }
+
+    fn visit_id(&mut self, _id: &mut NodeId) {
+        // Do nothing.
+    }
+
+    fn visit_span(&mut self, _sp: &mut Span) {
+        // Do nothing.
+    }
+}
+
+/// Use a map-style function (`FnOnce(T) -> T`) to overwrite a `&mut T`. Useful
+/// when using a `flat_map_*` or `filter_map_*` method within a `visit_`
+/// method.
+//
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+pub fn visit_clobber<T, F>(t: &mut T, f: F) where F: FnOnce(T) -> T {
+    unsafe { std::ptr::write(t, f(std::ptr::read(t))); }
+}
+
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+// Applies `visit_elem` to every element of the vector, in order, in place.
+#[inline]
+pub fn visit_vec<T, F>(elems: &mut Vec<T>, mut visit_elem: F) where F: FnMut(&mut T) {
+    for elem in elems {
+        visit_elem(elem);
+    }
+}
+
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+// Applies `visit_elem` to the contained value, if any.
+#[inline]
+pub fn visit_opt<T, F>(opt: &mut Option<T>, mut visit_elem: F) where F: FnMut(&mut T) {
+    if let Some(elem) = opt {
+        visit_elem(elem);
+    }
+}
+
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+// Visits every attribute in an ordinary (`Vec`-backed) attribute list.
+pub fn visit_attrs<T: MutVisitor>(attrs: &mut Vec<Attribute>, vis: &mut T) {
+    visit_vec(attrs, |attr| vis.visit_attribute(attr));
+}
+
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+// Same as `visit_attrs` but for the space-optimized `ThinVec` storage.
+pub fn visit_thin_attrs<T: MutVisitor>(attrs: &mut ThinVec<Attribute>, vis: &mut T) {
+    for attr in attrs.iter_mut() {
+        vis.visit_attribute(attr);
+    }
+}
+
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+// Uses `filter_map_expr`, so a visitor may drop expressions from the list.
+pub fn visit_exprs<T: MutVisitor>(exprs: &mut Vec<P<Expr>>, vis: &mut T) {
+    exprs.flat_map_in_place(|expr| vis.filter_map_expr(expr))
+}
+
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+// Visits each bound in a `GenericBounds` list.
+pub fn visit_bounds<T: MutVisitor>(bounds: &mut GenericBounds, vis: &mut T) {
+    visit_vec(bounds, |bound| vis.visit_param_bound(bound));
+}
+
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+// Visits the header and declaration of a method signature.
+pub fn visit_method_sig<T: MutVisitor>(MethodSig { header, decl }: &mut MethodSig, vis: &mut T) {
+    vis.visit_fn_header(header);
+    vis.visit_fn_decl(decl);
+}
+
+// Walks a `use` tree: the common path prefix, then the kind-specific parts
+// (optional rename + the two ids of a simple import, or each nested subtree).
+pub fn noop_visit_use_tree<T: MutVisitor>(use_tree: &mut UseTree, vis: &mut T) {
+    let UseTree { prefix, kind, span } = use_tree;
+    vis.visit_path(prefix);
+    match kind {
+        UseTreeKind::Simple(rename, id1, id2) => {
+            visit_opt(rename, |rename| vis.visit_ident(rename));
+            vis.visit_id(id1);
+            vis.visit_id(id2);
+        }
+        UseTreeKind::Nested(items) => {
+            for (tree, id) in items {
+                vis.visit_use_tree(tree);
+                vis.visit_id(id);
+            }
+        }
+        UseTreeKind::Glob => {}
+    }
+    vis.visit_span(span);
+}
+
+// Walks one `match` arm: attributes, each `|`-separated pattern, the optional
+// guard, and finally the arm body expression.
+pub fn noop_visit_arm<T: MutVisitor>(Arm { attrs, pats, guard, body }: &mut Arm, vis: &mut T) {
+    visit_attrs(attrs, vis);
+    visit_vec(pats, |pat| vis.visit_pat(pat));
+    visit_opt(guard, |guard| vis.visit_guard(guard));
+    vis.visit_expr(body);
+}
+
+// Walks a match-arm guard; `If` is currently the only guard form.
+pub fn noop_visit_guard<T: MutVisitor>(g: &mut Guard, vis: &mut T) {
+    match g {
+        Guard::If(e) => vis.visit_expr(e),
+    }
+}
+
+// Walks an associated-type binding (`Item = Ty` inside angle brackets).
+pub fn noop_visit_ty_binding<T: MutVisitor>(TypeBinding { id, ident, ty, span }: &mut TypeBinding,
+                                            vis: &mut T) {
+    vis.visit_id(id);
+    vis.visit_ident(ident);
+    vis.visit_ty(ty);
+    vis.visit_span(span);
+}
+
+// Walks a type node: id first, then the kind-specific children, then the span.
+pub fn noop_visit_ty<T: MutVisitor>(ty: &mut P<Ty>, vis: &mut T) {
+    let Ty { id, node, span } = ty.deref_mut();
+    vis.visit_id(id);
+    match node {
+        TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err | TyKind::Never => {}
+        TyKind::Slice(ty) => vis.visit_ty(ty),
+        TyKind::Ptr(mt) => vis.visit_mt(mt),
+        TyKind::Rptr(lt, mt) => {
+            // NOTE(review): calls the free `noop_visit_lifetime` rather than a
+            // `vis.visit_lifetime` method, so visitor overrides are bypassed
+            // here — confirm this is intentional.
+            visit_opt(lt, |lt| noop_visit_lifetime(lt, vis));
+            vis.visit_mt(mt);
+        }
+        TyKind::BareFn(bft) => {
+            let BareFnTy { unsafety: _, abi: _, generic_params, decl } = bft.deref_mut();
+            vis.visit_generic_params(generic_params);
+            vis.visit_fn_decl(decl);
+        }
+        TyKind::Tup(tys) => visit_vec(tys, |ty| vis.visit_ty(ty)),
+        TyKind::Paren(ty) => vis.visit_ty(ty),
+        TyKind::Path(qself, path) => {
+            vis.visit_qself(qself);
+            vis.visit_path(path);
+        }
+        TyKind::Array(ty, length) => {
+            vis.visit_ty(ty);
+            vis.visit_anon_const(length);
+        }
+        TyKind::Typeof(expr) => vis.visit_anon_const(expr),
+        TyKind::TraitObject(bounds, _syntax) =>
+            visit_vec(bounds, |bound| vis.visit_param_bound(bound)),
+        TyKind::ImplTrait(id, bounds) => {
+            vis.visit_id(id);
+            visit_vec(bounds, |bound| vis.visit_param_bound(bound));
+        }
+        TyKind::Mac(mac) => vis.visit_mac(mac),
+    }
+    vis.visit_span(span);
+}
+
+// Walks an `extern { .. }` block; items may be expanded one-to-many.
+pub fn noop_visit_foreign_mod<T: MutVisitor>(foreign_mod: &mut ForeignMod, vis: &mut T) {
+    let ForeignMod { abi: _, items} = foreign_mod;
+    items.flat_map_in_place(|item| vis.flat_map_foreign_item(item));
+}
+
+// Walks one enum variant: name, attributes, fields, optional discriminant.
+pub fn noop_visit_variant<T: MutVisitor>(variant: &mut Variant, vis: &mut T) {
+    let Spanned { node: Variant_ { ident, attrs, data, disr_expr }, span } = variant;
+    vis.visit_ident(ident);
+    visit_attrs(attrs, vis);
+    vis.visit_variant_data(data);
+    visit_opt(disr_expr, |disr_expr| vis.visit_anon_const(disr_expr));
+    vis.visit_span(span);
+}
+
+// Only the span of an identifier is mutable here; the interned name is not.
+pub fn noop_visit_ident<T: MutVisitor>(Ident { name: _, span }: &mut Ident, vis: &mut T) {
+    vis.visit_span(span);
+}
+
+// Walks a path: its overall span, then each segment's ident, id and
+// optional generic arguments.
+pub fn noop_visit_path<T: MutVisitor>(Path { segments, span }: &mut Path, vis: &mut T) {
+    vis.visit_span(span);
+    for PathSegment { ident, id, args } in segments {
+        vis.visit_ident(ident);
+        vis.visit_id(id);
+        visit_opt(args, |args| vis.visit_generic_args(args));
+    }
+}
+
+// Walks the `<Ty as Trait>` qualifier of a qualified path, if present.
+pub fn noop_visit_qself<T: MutVisitor>(qself: &mut Option<QSelf>, vis: &mut T) {
+    visit_opt(qself, |QSelf { ty, path_span, position: _ }| {
+        vis.visit_ty(ty);
+        vis.visit_span(path_span);
+    })
+}
+
+// Dispatches on the two syntactic forms of generic arguments:
+// `<A, B>` (angle-bracketed) vs `(A, B) -> C` (parenthesized, for Fn traits).
+pub fn noop_visit_generic_args<T: MutVisitor>(generic_args: &mut GenericArgs, vis: &mut T) {
+    match generic_args {
+        GenericArgs::AngleBracketed(data) => vis.visit_angle_bracketed_parameter_data(data),
+        GenericArgs::Parenthesized(data) => vis.visit_parenthesized_parameter_data(data),
+    }
+}
+
+// Walks a single generic argument: lifetime, type, or const expression.
+pub fn noop_visit_generic_arg<T: MutVisitor>(arg: &mut GenericArg, vis: &mut T) {
+    match arg {
+        GenericArg::Lifetime(lt) => vis.visit_lifetime(lt),
+        GenericArg::Type(ty) => vis.visit_ty(ty),
+        GenericArg::Const(ct) => vis.visit_anon_const(ct),
+    }
+}
+
+// Walks `<...>` argument data: each argument, each associated-type binding,
+// and the bracketed span.
+pub fn noop_visit_angle_bracketed_parameter_data<T: MutVisitor>(data: &mut AngleBracketedArgs,
+                                                                vis: &mut T) {
+    let AngleBracketedArgs { args, bindings, span } = data;
+    visit_vec(args, |arg| vis.visit_generic_arg(arg));
+    visit_vec(bindings, |binding| vis.visit_ty_binding(binding));
+    vis.visit_span(span);
+}
+
+// Walks `(...) -> ...` argument data: each input type and the optional output.
+pub fn noop_visit_parenthesized_parameter_data<T: MutVisitor>(args: &mut ParenthesizedArgs,
+                                                              vis: &mut T) {
+    let ParenthesizedArgs { inputs, output, span } = args;
+    visit_vec(inputs, |input| vis.visit_ty(input));
+    visit_opt(output, |output| vis.visit_ty(output));
+    vis.visit_span(span);
+}
+
+// Walks a `let` binding: pattern, optional type ascription, optional
+// initializer, span, and attached attributes.
+pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) {
+    let Local { id, pat, ty, init, span, attrs } = local.deref_mut();
+    vis.visit_id(id);
+    vis.visit_pat(pat);
+    visit_opt(ty, |ty| vis.visit_ty(ty));
+    visit_opt(init, |init| vis.visit_expr(init));
+    vis.visit_span(span);
+    visit_thin_attrs(attrs, vis);
+}
+
+// Walks an attribute: its path (e.g. `derive`) and raw token payload.
+pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
+    let Attribute { id: _, style: _, path, tokens, is_sugared_doc: _, span } = attr;
+    vis.visit_path(path);
+    vis.visit_tts(tokens);
+    vis.visit_span(span);
+}
+
+// Walks an (unexpanded) macro invocation: its path and argument token stream.
+pub fn noop_visit_mac<T: MutVisitor>(Spanned { node, span }: &mut Mac, vis: &mut T) {
+    let Mac_ { path, delim: _, tts } = node;
+    vis.visit_path(path);
+    vis.visit_tts(tts);
+    vis.visit_span(span);
+}
+
+// Walks a `macro_rules!`/`macro` definition's body tokens.
+pub fn noop_visit_macro_def<T: MutVisitor>(macro_def: &mut MacroDef, vis: &mut T) {
+    let MacroDef { tokens, legacy: _ } = macro_def;
+    vis.visit_tts(tokens);
+}
+
+// Walks one element of a meta-item list; literals have no visitable children.
+pub fn noop_visit_meta_list_item<T: MutVisitor>(li: &mut NestedMetaItem, vis: &mut T) {
+    let Spanned { node, span } = li;
+    match node {
+        NestedMetaItemKind::MetaItem(mi) => vis.visit_meta_item(mi),
+        NestedMetaItemKind::Literal(_lit) => {}
+    }
+    vis.visit_span(span);
+}
+
+// Walks a meta item (`word`, `list(...)`, or `name = value`); only lists
+// have visitable children.
+pub fn noop_visit_meta_item<T: MutVisitor>(mi: &mut MetaItem, vis: &mut T) {
+    let MetaItem { ident: _, node, span } = mi;
+    match node {
+        MetaItemKind::Word => {}
+        MetaItemKind::List(mis) => visit_vec(mis, |mi| vis.visit_meta_list_item(mi)),
+        MetaItemKind::NameValue(_s) => {}
+    }
+    vis.visit_span(span);
+}
+
+// Walks a function argument: its pattern and declared type.
+pub fn noop_visit_arg<T: MutVisitor>(Arg { id, pat, ty }: &mut Arg, vis: &mut T) {
+    vis.visit_id(id);
+    vis.visit_pat(pat);
+    vis.visit_ty(ty);
+}
+
+// Walks a single token tree: either one token, or a delimited group whose
+// open/close spans are visited before recursing into the inner stream.
+pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
+    match tt {
+        TokenTree::Token(span, tok) => {
+            vis.visit_span(span);
+            vis.visit_token(tok);
+        }
+        TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
+            vis.visit_span(open);
+            vis.visit_span(close);
+            vis.visit_tts(tts);
+        }
+    }
+}
+
+// Walks a token stream. `Lrc::make_mut` gives clone-on-write access, so a
+// stream shared with other owners is copied before mutation.
+pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
+    visit_opt(tts, |tts| {
+        let tts = Lrc::make_mut(tts);
+        visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
+    })
+}
+
+// apply ident visitor if it's an ident, apply other visits to interpolated nodes
+pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
+    match t {
+        token::Ident(id, _is_raw) => vis.visit_ident(id),
+        token::Lifetime(id) => vis.visit_ident(id),
+        token::Interpolated(nt) => {
+            let nt = Lrc::make_mut(nt);
+            vis.visit_interpolated(&mut nt.0);
+            // The cached token stream may no longer match the (possibly
+            // mutated) nonterminal, so reset it to be recomputed lazily.
+            nt.1 = token::LazyTokenStream::new();
+        }
+        _ => {}
+    }
+}
+
+/// Apply visitor to elements of interpolated nodes.
+//
+// N.B., this can occur only when applying a visitor to partially expanded
+// code, where parsed pieces have gotten implanted ito *other* macro
+// invocations. This is relevant for macro hygiene, but possibly not elsewhere.
+//
+// One problem here occurs because the types for flat_map_item, flat_map_stmt,
+// etc. allow the visitor to return *multiple* items; this is a problem for the
+// nodes here, because they insist on having exactly one piece. One solution
+// would be to mangle the MutVisitor trait to include one-to-many and
+// one-to-one versions of these entry points, but that would probably confuse a
+// lot of people and help very few. Instead, I'm just going to put in dynamic
+// checks. I think the performance impact of this will be pretty much
+// nonexistent. The danger is that someone will apply a MutVisitor to a
+// partially expanded node, and will be confused by the fact that their
+// "flat_map_item" or "flat_map_stmt" isn't getting called on NtItem or NtStmt
+// nodes. Hopefully they'll wind up reading this comment, and doing something
+// appropriate.
+//
+// BTW, design choice: I considered just changing the type of, e.g., NtItem to
+// contain multiple items, but decided against it when I looked at
+// parse_item_or_view_item and tried to figure out what I would do with
+// multiple items there....
+pub fn noop_visit_interpolated<T: MutVisitor>(nt: &mut token::Nonterminal, vis: &mut T) {
+    match nt {
+        token::NtItem(item) =>
+            visit_clobber(item, |item| {
+                // This is probably okay, because the only visitors likely to
+                // peek inside interpolated nodes will be renamings/markings,
+                // which map single items to single items.
+                vis.flat_map_item(item).expect_one("expected visitor to produce exactly one item")
+            }),
+        token::NtBlock(block) => vis.visit_block(block),
+        token::NtStmt(stmt) =>
+            visit_clobber(stmt, |stmt| {
+                // See reasoning above.
+                vis.flat_map_stmt(stmt).expect_one("expected visitor to produce exactly one item")
+            }),
+        token::NtPat(pat) => vis.visit_pat(pat),
+        token::NtExpr(expr) => vis.visit_expr(expr),
+        token::NtTy(ty) => vis.visit_ty(ty),
+        token::NtIdent(ident, _is_raw) => vis.visit_ident(ident),
+        token::NtLifetime(ident) => vis.visit_ident(ident),
+        token::NtLiteral(expr) => vis.visit_expr(expr),
+        token::NtMeta(meta) => vis.visit_meta_item(meta),
+        token::NtPath(path) => vis.visit_path(path),
+        token::NtTT(tt) => vis.visit_tt(tt),
+        token::NtArm(arm) => vis.visit_arm(arm),
+        token::NtImplItem(item) =>
+            visit_clobber(item, |item| {
+                // See reasoning above.
+                vis.flat_map_impl_item(item)
+                    .expect_one("expected visitor to produce exactly one item")
+            }),
+        token::NtTraitItem(item) =>
+            visit_clobber(item, |item| {
+                // See reasoning above.
+                vis.flat_map_trait_item(item)
+                    .expect_one("expected visitor to produce exactly one item")
+            }),
+        token::NtGenerics(generics) => vis.visit_generics(generics),
+        token::NtWhereClause(where_clause) => vis.visit_where_clause(where_clause),
+        token::NtArg(arg) => vis.visit_arg(arg),
+        token::NtVis(visib) => vis.visit_vis(visib),
+        token::NtForeignItem(item) =>
+            visit_clobber(item, |item| {
+                // See reasoning above.
+                vis.flat_map_foreign_item(item)
+                    .expect_one("expected visitor to produce exactly one item")
+            }),
+    }
+}
+
+// Walks an async marker; async fns carry two synthesized node ids (the
+// generated closure and the `impl Future` return type).
+pub fn noop_visit_asyncness<T: MutVisitor>(asyncness: &mut IsAsync, vis: &mut T) {
+    match asyncness {
+        IsAsync::Async { closure_id, return_impl_trait_id } => {
+            vis.visit_id(closure_id);
+            vis.visit_id(return_impl_trait_id);
+        }
+        IsAsync::NotAsync => {}
+    }
+}
+
+// Walks a function declaration: each argument, then the return type (a
+// defaulted return has only a span to visit).
+pub fn noop_visit_fn_decl<T: MutVisitor>(decl: &mut P<FnDecl>, vis: &mut T) {
+    let FnDecl { inputs, output, variadic: _ } = decl.deref_mut();
+    visit_vec(inputs, |input| vis.visit_arg(input));
+    match output {
+        FunctionRetTy::Default(span) => vis.visit_span(span),
+        FunctionRetTy::Ty(ty) => vis.visit_ty(ty),
+    }
+}
+
+// Walks one generic bound: a (poly) trait reference or an outlives lifetime.
+pub fn noop_visit_param_bound<T: MutVisitor>(pb: &mut GenericBound, vis: &mut T) {
+    match pb {
+        GenericBound::Trait(ty, _modifier) => vis.visit_poly_trait_ref(ty),
+        // NOTE(review): direct call bypasses any `visit_lifetime` override on
+        // the visitor — confirm intentional.
+        GenericBound::Outlives(lifetime) => noop_visit_lifetime(lifetime, vis),
+    }
+}
+
+// Walks one generic parameter: id, name, attributes, bounds, and the
+// kind-specific payload (type default or const type).
+pub fn noop_visit_generic_param<T: MutVisitor>(param: &mut GenericParam, vis: &mut T) {
+    let GenericParam { id, ident, attrs, bounds, kind } = param;
+    vis.visit_id(id);
+    vis.visit_ident(ident);
+    visit_thin_attrs(attrs, vis);
+    // NOTE(review): calls the free function, not `vis.visit_param_bound`, so
+    // visitor overrides are bypassed here — confirm intentional.
+    visit_vec(bounds, |bound| noop_visit_param_bound(bound, vis));
+    match kind {
+        GenericParamKind::Lifetime => {}
+        GenericParamKind::Type { default } => {
+            visit_opt(default, |default| vis.visit_ty(default));
+        }
+        GenericParamKind::Const { ty } => {
+            vis.visit_ty(ty);
+        }
+    }
+}
+
+// Walks each parameter in a generic parameter list.
+pub fn noop_visit_generic_params<T: MutVisitor>(params: &mut Vec<GenericParam>, vis: &mut T){
+    visit_vec(params, |param| vis.visit_generic_param(param));
+}
+
+// Walks a loop/block label (just its identifier).
+pub fn noop_visit_label<T: MutVisitor>(Label { ident }: &mut Label, vis: &mut T) {
+    vis.visit_ident(ident);
+}
+
+// Private helper; note there is no public `visit_lifetime` path through here.
+fn noop_visit_lifetime<T: MutVisitor>(Lifetime { id, ident }: &mut Lifetime, vis: &mut T) {
+    vis.visit_id(id);
+    vis.visit_ident(ident);
+}
+
+// Walks a generics header: parameter list, `where` clause, and span.
+pub fn noop_visit_generics<T: MutVisitor>(generics: &mut Generics, vis: &mut T) {
+    let Generics { params, where_clause, span } = generics;
+    vis.visit_generic_params(params);
+    vis.visit_where_clause(where_clause);
+    vis.visit_span(span);
+}
+
+// Walks a `where` clause: id, each predicate, span.
+pub fn noop_visit_where_clause<T: MutVisitor>(wc: &mut WhereClause, vis: &mut T) {
+    let WhereClause { id, predicates, span } = wc;
+    vis.visit_id(id);
+    visit_vec(predicates, |predicate| vis.visit_where_predicate(predicate));
+    vis.visit_span(span);
+}
+
+// Walks one `where` predicate: type bound, lifetime region, or equality.
+pub fn noop_visit_where_predicate<T: MutVisitor>(pred: &mut WherePredicate, vis: &mut T) {
+    match pred {
+        WherePredicate::BoundPredicate(bp) => {
+            let WhereBoundPredicate { span, bound_generic_params, bounded_ty, bounds } = bp;
+            vis.visit_span(span);
+            vis.visit_generic_params(bound_generic_params);
+            vis.visit_ty(bounded_ty);
+            visit_vec(bounds, |bound| vis.visit_param_bound(bound));
+        }
+        WherePredicate::RegionPredicate(rp) => {
+            let WhereRegionPredicate { span, lifetime, bounds } = rp;
+            vis.visit_span(span);
+            noop_visit_lifetime(lifetime, vis);
+            // NOTE(review): inconsistent with the BoundPredicate arm above,
+            // which goes through `vis.visit_param_bound`; overrides are
+            // bypassed here — confirm intentional.
+            visit_vec(bounds, |bound| noop_visit_param_bound(bound, vis));
+        }
+        WherePredicate::EqPredicate(ep) => {
+            let WhereEqPredicate { id, span, lhs_ty, rhs_ty } = ep;
+            vis.visit_id(id);
+            vis.visit_span(span);
+            vis.visit_ty(lhs_ty);
+            vis.visit_ty(rhs_ty);
+        }
+    }
+}
+
+// Walks the fields of a struct/tuple/unit variant body plus its node id.
+pub fn noop_visit_variant_data<T: MutVisitor>(vdata: &mut VariantData, vis: &mut T) {
+    match vdata {
+        VariantData::Struct(fields, id) |
+        VariantData::Tuple(fields, id) => {
+            visit_vec(fields, |field| vis.visit_struct_field(field));
+            vis.visit_id(id);
+        }
+        VariantData::Unit(id) => vis.visit_id(id),
+    }
+}
+
+// Walks a trait reference: its path and the referencing node id.
+pub fn noop_visit_trait_ref<T: MutVisitor>(TraitRef { path, ref_id }: &mut TraitRef, vis: &mut T) {
+    vis.visit_path(path);
+    vis.visit_id(ref_id);
+}
+
+// Walks a poly trait ref (`for<'a> Trait<...>`): binder params then the ref.
+pub fn noop_visit_poly_trait_ref<T: MutVisitor>(p: &mut PolyTraitRef, vis: &mut T) {
+    let PolyTraitRef { bound_generic_params, trait_ref, span } = p;
+    vis.visit_generic_params(bound_generic_params);
+    vis.visit_trait_ref(trait_ref);
+    vis.visit_span(span);
+}
+
+// Walks a struct field. The parameter is named `visitor` because the field's
+// own visibility is destructured as `vis`.
+pub fn noop_visit_struct_field<T: MutVisitor>(f: &mut StructField, visitor: &mut T) {
+    let StructField { span, ident, vis, id, ty, attrs } = f;
+    visitor.visit_span(span);
+    visit_opt(ident, |ident| visitor.visit_ident(ident));
+    visitor.visit_vis(vis);
+    visitor.visit_id(id);
+    visitor.visit_ty(ty);
+    visit_attrs(attrs, visitor);
+}
+
+// Walks a field of a struct-literal expression (`name: expr`).
+pub fn noop_visit_field<T: MutVisitor>(f: &mut Field, vis: &mut T) {
+    let Field { ident, expr, span, is_shorthand: _, attrs } = f;
+    vis.visit_ident(ident);
+    vis.visit_expr(expr);
+    vis.visit_span(span);
+    visit_thin_attrs(attrs, vis);
+}
+
+// Walks a `&T`/`*T` pointee type; mutability itself has no children.
+pub fn noop_visit_mt<T: MutVisitor>(MutTy { ty, mutbl: _ }: &mut MutTy, vis: &mut T) {
+    vis.visit_ty(ty);
+}
+
+// Walks a block; statements may be expanded one-to-many via `flat_map_stmt`.
+pub fn noop_visit_block<T: MutVisitor>(block: &mut P<Block>, vis: &mut T) {
+    let Block { id, stmts, rules: _, span } = block.deref_mut();
+    vis.visit_id(id);
+    stmts.flat_map_in_place(|stmt| vis.flat_map_stmt(stmt));
+    vis.visit_span(span);
+}
+
+// Walks the kind-specific children of an item. The item's shared parts
+// (ident, attrs, id, vis, span) are handled by `noop_flat_map_item`.
+pub fn noop_visit_item_kind<T: MutVisitor>(kind: &mut ItemKind, vis: &mut T) {
+    match kind {
+        ItemKind::ExternCrate(_orig_name) => {}
+        ItemKind::Use(use_tree) => vis.visit_use_tree(use_tree),
+        ItemKind::Static(ty, _mut, expr) => {
+            vis.visit_ty(ty);
+            vis.visit_expr(expr);
+        }
+        ItemKind::Const(ty, expr) => {
+            vis.visit_ty(ty);
+            vis.visit_expr(expr);
+        }
+        ItemKind::Fn(decl, header, generics, body) => {
+            vis.visit_fn_decl(decl);
+            vis.visit_fn_header(header);
+            vis.visit_generics(generics);
+            vis.visit_block(body);
+        }
+        ItemKind::Mod(m) => vis.visit_mod(m),
+        ItemKind::ForeignMod(nm) => vis.visit_foreign_mod(nm),
+        ItemKind::GlobalAsm(_ga) => {}
+        ItemKind::Ty(ty, generics) => {
+            vis.visit_ty(ty);
+            vis.visit_generics(generics);
+        }
+        ItemKind::Existential(bounds, generics) => {
+            visit_bounds(bounds, vis);
+            vis.visit_generics(generics);
+        }
+        ItemKind::Enum(EnumDef { variants }, generics) => {
+            visit_vec(variants, |variant| vis.visit_variant(variant));
+            vis.visit_generics(generics);
+        }
+        ItemKind::Struct(variant_data, generics) |
+        ItemKind::Union(variant_data, generics) => {
+            vis.visit_variant_data(variant_data);
+            vis.visit_generics(generics);
+        }
+        // Impl and trait items may be expanded one-to-many, hence `flat_map_*`.
+        ItemKind::Impl(_unsafety, _polarity, _defaultness, generics, trait_ref, ty, items) => {
+            vis.visit_generics(generics);
+            visit_opt(trait_ref, |trait_ref| vis.visit_trait_ref(trait_ref));
+            vis.visit_ty(ty);
+            items.flat_map_in_place(|item| vis.flat_map_impl_item(item));
+        }
+        ItemKind::Trait(_is_auto, _unsafety, generics, bounds, items) => {
+            vis.visit_generics(generics);
+            visit_bounds(bounds, vis);
+            items.flat_map_in_place(|item| vis.flat_map_trait_item(item));
+        }
+        ItemKind::TraitAlias(generics, bounds) => {
+            vis.visit_generics(generics);
+            visit_bounds(bounds, vis);
+        }
+        ItemKind::Mac(m) => vis.visit_mac(m),
+        ItemKind::MacroDef(def) => vis.visit_macro_def(def),
+    }
+}
+
+// Visits a trait item in place and returns it as a one-element SmallVec;
+// overriding visitors may return zero or many items instead.
+pub fn noop_flat_map_trait_item<T: MutVisitor>(mut item: TraitItem, vis: &mut T)
+    -> SmallVec<[TraitItem; 1]>
+{
+    let TraitItem { id, ident, attrs, generics, node, span, tokens: _ } = &mut item;
+    vis.visit_id(id);
+    vis.visit_ident(ident);
+    visit_attrs(attrs, vis);
+    vis.visit_generics(generics);
+    match node {
+        TraitItemKind::Const(ty, default) => {
+            vis.visit_ty(ty);
+            visit_opt(default, |default| vis.visit_expr(default));
+        }
+        TraitItemKind::Method(sig, body) => {
+            visit_method_sig(sig, vis);
+            // Body is optional: a trait method may be declaration-only.
+            visit_opt(body, |body| vis.visit_block(body));
+        }
+        TraitItemKind::Type(bounds, default) => {
+            visit_bounds(bounds, vis);
+            visit_opt(default, |default| vis.visit_ty(default));
+        }
+        TraitItemKind::Macro(mac) => {
+            vis.visit_mac(mac);
+        }
+    }
+    vis.visit_span(span);
+
+    smallvec![item]
+}
+
+// Visits an impl item in place and returns it as a one-element SmallVec.
+// The visitor parameter is `visitor` because the item's visibility binds `vis`.
+pub fn noop_flat_map_impl_item<T: MutVisitor>(mut item: ImplItem, visitor: &mut T)
+                                              -> SmallVec<[ImplItem; 1]>
+{
+    let ImplItem { id, ident, vis, defaultness: _, attrs, generics, node, span, tokens: _ } =
+        &mut item;
+    visitor.visit_id(id);
+    visitor.visit_ident(ident);
+    visitor.visit_vis(vis);
+    visit_attrs(attrs, visitor);
+    visitor.visit_generics(generics);
+    match node  {
+        ImplItemKind::Const(ty, expr) => {
+            visitor.visit_ty(ty);
+            visitor.visit_expr(expr);
+        }
+        ImplItemKind::Method(sig, body) => {
+            visit_method_sig(sig, visitor);
+            visitor.visit_block(body);
+        }
+        ImplItemKind::Type(ty) => visitor.visit_ty(ty),
+        ImplItemKind::Existential(bounds) => visit_bounds(bounds, visitor),
+        ImplItemKind::Macro(mac) => visitor.visit_mac(mac),
+    }
+    visitor.visit_span(span);
+
+    smallvec![item]
+}
+
+// Walks a fn header; only the asyncness carries visitable children (node ids).
+pub fn noop_visit_fn_header<T: MutVisitor>(header: &mut FnHeader, vis: &mut T) {
+    let FnHeader { unsafety: _, asyncness, constness: _, abi: _ } = header;
+    vis.visit_asyncness(asyncness);
+}
+
+// Walks a module: its inner span and items (expandable one-to-many).
+pub fn noop_visit_mod<T: MutVisitor>(Mod { inner, items, inline: _ }: &mut Mod, vis: &mut T) {
+    vis.visit_span(inner);
+    items.flat_map_in_place(|item| vis.flat_map_item(item));
+}
+
+// Walks a whole crate by temporarily wrapping it in a synthetic root `mod`
+// item, running `flat_map_item` on that, and unwrapping the result. This lets
+// item-level visitors see the crate root uniformly. The visitor must map the
+// root module to exactly one module item, or we panic below.
+pub fn noop_visit_crate<T: MutVisitor>(krate: &mut Crate, vis: &mut T) {
+    visit_clobber(krate, |Crate { module, attrs, span }| {
+        let item = P(Item {
+            ident: keywords::Invalid.ident(),
+            attrs,
+            id: DUMMY_NODE_ID,
+            vis: respan(span.shrink_to_lo(), VisibilityKind::Public),
+            span,
+            node: ItemKind::Mod(module),
+            tokens: None,
+        });
+        let items = vis.flat_map_item(item);
+
+        let len = items.len();
+        if len == 0 {
+            // The root module was deleted: produce an empty crate.
+            let module = Mod { inner: span, items: vec![], inline: true };
+            Crate { module, attrs: vec![], span }
+        } else if len == 1 {
+            let Item { attrs, span, node, .. } = items.into_iter().next().unwrap().into_inner();
+            match node {
+                ItemKind::Mod(module) => Crate { module, attrs, span },
+                _ => panic!("visitor converted a module to not a module"),
+            }
+        } else {
+            panic!("a crate cannot expand to more than one item");
+        }
+    });
+}
+
+// Mutate one item into possibly many items.
+pub fn noop_flat_map_item<T: MutVisitor>(mut item: P<Item>, visitor: &mut T)
+                                         -> SmallVec<[P<Item>; 1]> {
+    let Item { ident, attrs, id, node, vis, span, tokens: _ } = item.deref_mut();
+    visitor.visit_ident(ident);
+    visit_attrs(attrs, visitor);
+    visitor.visit_id(id);
+    visitor.visit_item_kind(node);
+    visitor.visit_vis(vis);
+    visitor.visit_span(span);
+
+    // FIXME: if `tokens` is modified with a call to `vis.visit_tts` it causes
+    //        an ICE during resolve... odd!
+
+    smallvec![item]
+}
+
+// Visits a foreign (`extern` block) item in place and returns it as a
+// one-element SmallVec; overriding visitors may return zero or many.
+pub fn noop_flat_map_foreign_item<T: MutVisitor>(mut item: ForeignItem, visitor: &mut T)
+    -> SmallVec<[ForeignItem; 1]>
+{
+    let ForeignItem { ident, attrs, node, id, span, vis } = &mut item;
+    visitor.visit_ident(ident);
+    visit_attrs(attrs, visitor);
+    match node {
+        ForeignItemKind::Fn(fdec, generics) => {
+            visitor.visit_fn_decl(fdec);
+            visitor.visit_generics(generics);
+        }
+        ForeignItemKind::Static(t, _m) => visitor.visit_ty(t),
+        ForeignItemKind::Ty => {}
+        ForeignItemKind::Macro(mac) => visitor.visit_mac(mac),
+    }
+    visitor.visit_id(id);
+    visitor.visit_span(span);
+    visitor.visit_vis(vis);
+
+    smallvec![item]
+}
+
+// Walks a pattern: id, kind-specific children, then the pattern's span.
+pub fn noop_visit_pat<T: MutVisitor>(pat: &mut P<Pat>, vis: &mut T) {
+    let Pat { id, node, span } = pat.deref_mut();
+    vis.visit_id(id);
+    match node {
+        PatKind::Wild => {}
+        PatKind::Ident(_binding_mode, ident, sub) => {
+            vis.visit_ident(ident);
+            visit_opt(sub, |sub| vis.visit_pat(sub));
+        }
+        PatKind::Lit(e) => vis.visit_expr(e),
+        PatKind::TupleStruct(path, pats, _ddpos) => {
+            vis.visit_path(path);
+            visit_vec(pats, |pat| vis.visit_pat(pat));
+        }
+        PatKind::Path(qself, path) => {
+            vis.visit_qself(qself);
+            vis.visit_path(path);
+        }
+        PatKind::Struct(path, fields, _etc) => {
+            vis.visit_path(path);
+            for Spanned { node: FieldPat { ident, pat, is_shorthand: _, attrs }, span } in fields {
+                vis.visit_ident(ident);
+                vis.visit_pat(pat);
+                visit_thin_attrs(attrs, vis);
+                vis.visit_span(span);
+            };
+        }
+        PatKind::Tuple(elts, _ddpos) => visit_vec(elts, |elt| vis.visit_pat(elt)),
+        PatKind::Box(inner) => vis.visit_pat(inner),
+        PatKind::Ref(inner, _mutbl) => vis.visit_pat(inner),
+        PatKind::Range(e1, e2, Spanned { span: _, node: _ }) => {
+            vis.visit_expr(e1);
+            vis.visit_expr(e2);
+            // NOTE(review): the `RangeEnd`'s own span is ignored (`span: _`)
+            // and this `span` is the *outer* pattern span, which is visited a
+            // second time after the match below — looks like a double visit;
+            // confirm the intent was the range-end span.
+            vis.visit_span(span);
+        },
+        PatKind::Slice(before, slice, after) => {
+            visit_vec(before, |pat| vis.visit_pat(pat));
+            visit_opt(slice, |slice| vis.visit_pat(slice));
+            visit_vec(after, |pat| vis.visit_pat(pat));
+        }
+        PatKind::Paren(inner) => vis.visit_pat(inner),
+        PatKind::Mac(mac) => vis.visit_mac(mac),
+    }
+    vis.visit_span(span);
+}
+
+// Walks an anonymous constant (array length, enum discriminant, const arg).
+pub fn noop_visit_anon_const<T: MutVisitor>(AnonConst { id, value }: &mut AnonConst, vis: &mut T) {
+    vis.visit_id(id);
+    vis.visit_expr(value);
+}
+
+// Walks an expression: kind-specific children first, then id, span, and
+// attributes. The `Paren` arm is special-cased (see its comment and early
+// return below).
+pub fn noop_visit_expr<T: MutVisitor>(Expr { node, id, span, attrs }: &mut Expr, vis: &mut T) {
+    match node {
+        ExprKind::Box(expr) => vis.visit_expr(expr),
+        ExprKind::ObsoleteInPlace(a, b) => {
+            vis.visit_expr(a);
+            vis.visit_expr(b);
+        }
+        ExprKind::Array(exprs) => visit_exprs(exprs, vis),
+        ExprKind::Repeat(expr, count) => {
+            vis.visit_expr(expr);
+            vis.visit_anon_const(count);
+        }
+        ExprKind::Tup(exprs) => visit_exprs(exprs, vis),
+        ExprKind::Call(f, args) => {
+            vis.visit_expr(f);
+            visit_exprs(args, vis);
+        }
+        ExprKind::MethodCall(PathSegment { ident, id, args }, exprs) => {
+            vis.visit_ident(ident);
+            vis.visit_id(id);
+            visit_opt(args, |args| vis.visit_generic_args(args));
+            visit_exprs(exprs, vis);
+        }
+        ExprKind::Binary(_binop, lhs, rhs) => {
+            vis.visit_expr(lhs);
+            vis.visit_expr(rhs);
+        }
+        ExprKind::Unary(_unop, ohs) => vis.visit_expr(ohs),
+        ExprKind::Lit(_lit) => {}
+        ExprKind::Cast(expr, ty) => {
+            vis.visit_expr(expr);
+            vis.visit_ty(ty);
+        }
+        ExprKind::Type(expr, ty) => {
+            vis.visit_expr(expr);
+            vis.visit_ty(ty);
+        }
+        ExprKind::AddrOf(_m, ohs) => vis.visit_expr(ohs),
+        ExprKind::If(cond, tr, fl) => {
+            vis.visit_expr(cond);
+            vis.visit_block(tr);
+            visit_opt(fl, |fl| vis.visit_expr(fl));
+        }
+        ExprKind::IfLet(pats, expr, tr, fl) => {
+            visit_vec(pats, |pat| vis.visit_pat(pat));
+            vis.visit_expr(expr);
+            vis.visit_block(tr);
+            visit_opt(fl, |fl| vis.visit_expr(fl));
+        }
+        ExprKind::While(cond, body, label) => {
+            vis.visit_expr(cond);
+            vis.visit_block(body);
+            visit_opt(label, |label| vis.visit_label(label));
+        }
+        ExprKind::WhileLet(pats, expr, body, label) => {
+            visit_vec(pats, |pat| vis.visit_pat(pat));
+            vis.visit_expr(expr);
+            vis.visit_block(body);
+            visit_opt(label, |label| vis.visit_label(label));
+        }
+        ExprKind::ForLoop(pat, iter, body, label) => {
+            vis.visit_pat(pat);
+            vis.visit_expr(iter);
+            vis.visit_block(body);
+            visit_opt(label, |label| vis.visit_label(label));
+        }
+        ExprKind::Loop(body, label) => {
+            vis.visit_block(body);
+            visit_opt(label, |label| vis.visit_label(label));
+        }
+        ExprKind::Match(expr, arms) => {
+            vis.visit_expr(expr);
+            visit_vec(arms, |arm| vis.visit_arm(arm));
+        }
+        ExprKind::Closure(_capture_by, asyncness, _movability, decl, body, span) => {
+            vis.visit_asyncness(asyncness);
+            vis.visit_fn_decl(decl);
+            vis.visit_expr(body);
+            // This `span` is the closure's own declaration span, distinct from
+            // the outer expression span visited after the match.
+            vis.visit_span(span);
+        }
+        ExprKind::Block(blk, label) => {
+            vis.visit_block(blk);
+            visit_opt(label, |label| vis.visit_label(label));
+        }
+        ExprKind::Async(_capture_by, node_id, body) => {
+            vis.visit_id(node_id);
+            vis.visit_block(body);
+        }
+        ExprKind::Assign(el, er) => {
+            vis.visit_expr(el);
+            vis.visit_expr(er);
+        }
+        ExprKind::AssignOp(_op, el, er) => {
+            vis.visit_expr(el);
+            vis.visit_expr(er);
+        }
+        ExprKind::Field(el, ident) => {
+            vis.visit_expr(el);
+            vis.visit_ident(ident);
+        }
+        ExprKind::Index(el, er) => {
+            vis.visit_expr(el);
+            vis.visit_expr(er);
+        }
+        ExprKind::Range(e1, e2, _lim) => {
+            visit_opt(e1, |e1| vis.visit_expr(e1));
+            visit_opt(e2, |e2| vis.visit_expr(e2));
+        }
+        ExprKind::Path(qself, path) => {
+            vis.visit_qself(qself);
+            vis.visit_path(path);
+        }
+        ExprKind::Break(label, expr) => {
+            visit_opt(label, |label| vis.visit_label(label));
+            visit_opt(expr, |expr| vis.visit_expr(expr));
+        }
+        ExprKind::Continue(label) => {
+            visit_opt(label, |label| vis.visit_label(label));
+        }
+        ExprKind::Ret(expr) => {
+            visit_opt(expr, |expr| vis.visit_expr(expr));
+        }
+        ExprKind::InlineAsm(asm) => {
+            // Only the embedded expressions (outputs and inputs) are walked;
+            // constraint strings and flags are left untouched.
+            let InlineAsm { asm: _, asm_str_style: _, outputs, inputs, clobbers: _, volatile: _,
+                            alignstack: _, dialect: _, ctxt: _ } = asm.deref_mut();
+            for out in outputs {
+                let InlineAsmOutput { constraint: _, expr, is_rw: _, is_indirect: _ } = out;
+                vis.visit_expr(expr);
+            }
+            visit_vec(inputs, |(_c, expr)| vis.visit_expr(expr));
+        }
+        ExprKind::Mac(mac) => vis.visit_mac(mac),
+        ExprKind::Struct(path, fields, expr) => {
+            vis.visit_path(path);
+            visit_vec(fields, |field| vis.visit_field(field));
+            visit_opt(expr, |expr| vis.visit_expr(expr));
+        },
+        ExprKind::Paren(expr) => {
+            vis.visit_expr(expr);
+
+            // Nodes that are equal modulo `Paren` sugar no-ops should have the same ids.
+            *id = expr.id;
+            vis.visit_span(span);
+            visit_thin_attrs(attrs, vis);
+            // Early return: the copied id must not be re-visited below, so the
+            // shared tail (visit_id/span/attrs) is replicated here without it.
+            return;
+        }
+        ExprKind::Yield(expr) => {
+            visit_opt(expr, |expr| vis.visit_expr(expr));
+        }
+        ExprKind::Try(expr) => vis.visit_expr(expr),
+        ExprKind::TryBlock(body) => vis.visit_block(body),
+        ExprKind::Err => {}
+    }
+    vis.visit_id(id);
+    vis.visit_span(span);
+    visit_thin_attrs(attrs, vis);
+}
+
+pub fn noop_filter_map_expr<T: MutVisitor>(mut e: P<Expr>, vis: &mut T) -> Option<P<Expr>> {
+    Some({ vis.visit_expr(&mut e); e })
+}
+
+pub fn noop_flat_map_stmt<T: MutVisitor>(Stmt { node, mut span, mut id }: Stmt, vis: &mut T)
+    -> SmallVec<[Stmt; 1]>
+{
+    vis.visit_id(&mut id);
+    vis.visit_span(&mut span);
+    noop_flat_map_stmt_kind(node, vis).into_iter().map(|node| {
+        Stmt { id, node, span }
+    }).collect()
+}
+
+pub fn noop_flat_map_stmt_kind<T: MutVisitor>(node: StmtKind, vis: &mut T)
+                                              -> SmallVec<[StmtKind; 1]> {
+    match node {
+        StmtKind::Local(mut local) =>
+            smallvec![StmtKind::Local({ vis.visit_local(&mut local); local })],
+        StmtKind::Item(item) => vis.flat_map_item(item).into_iter().map(StmtKind::Item).collect(),
+        StmtKind::Expr(expr) => {
+            vis.filter_map_expr(expr).into_iter().map(StmtKind::Expr).collect()
+        }
+        StmtKind::Semi(expr) => {
+            vis.filter_map_expr(expr).into_iter().map(StmtKind::Semi).collect()
+        }
+        StmtKind::Mac(mut mac) => {
+            let (mac_, _semi, attrs) = mac.deref_mut();
+            vis.visit_mac(mac_);
+            visit_thin_attrs(attrs, vis);
+            smallvec![StmtKind::Mac(mac)]
+        }
+    }
+}
+
+pub fn noop_visit_vis<T: MutVisitor>(Spanned { node, span }: &mut Visibility, vis: &mut T) {
+    match node {
+        VisibilityKind::Public | VisibilityKind::Crate(_) | VisibilityKind::Inherited => {}
+        VisibilityKind::Restricted { path, id } => {
+            vis.visit_path(path);
+            vis.visit_id(id);
+        }
+    }
+    vis.visit_span(span);
+}
+
+#[cfg(test)]
+mod tests {
+    use std::io;
+    use crate::ast::{self, Ident};
+    use crate::util::parser_testing::{string_to_crate, matches_codepattern};
+    use crate::print::pprust;
+    use crate::mut_visit;
+    use crate::with_globals;
+    use super::*;
+
+    // This version doesn't care about getting comments or docstrings in.
+    fn fake_print_crate(s: &mut pprust::State<'_>,
+                        krate: &ast::Crate) -> io::Result<()> {
+        s.print_mod(&krate.module, &krate.attrs)
+    }
+
+    // change every identifier to "zz"
+    struct ToZzIdentMutVisitor;
+
+    impl MutVisitor for ToZzIdentMutVisitor {
+        fn visit_ident(&mut self, ident: &mut ast::Ident) {
+            *ident = Ident::from_str("zz");
+        }
+        fn visit_mac(&mut self, mac: &mut ast::Mac) {
+            mut_visit::noop_visit_mac(mac, self)
+        }
+    }
+
+    // maybe add to expand.rs...
+    macro_rules! assert_pred {
+        ($pred:expr, $predname:expr, $a:expr , $b:expr) => (
+            {
+                let pred_val = $pred;
+                let a_val = $a;
+                let b_val = $b;
+                if !(pred_val(&a_val, &b_val)) {
+                    panic!("expected args satisfying {}, got {} and {}",
+                          $predname, a_val, b_val);
+                }
+            }
+        )
+    }
+
+    // make sure idents get transformed everywhere
+    #[test] fn ident_transformation () {
+        with_globals(|| {
+            let mut zz_visitor = ToZzIdentMutVisitor;
+            let mut krate = string_to_crate(
+                "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string());
+            zz_visitor.visit_crate(&mut krate);
+            assert_pred!(
+                matches_codepattern,
+                "matches_codepattern",
+                pprust::to_string(|s| fake_print_crate(s, &krate)),
+                "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string());
+        })
+    }
+
+    // Make sure idents get transformed even inside macro definitions.
+    #[test] fn ident_transformation_in_defs () {
+        with_globals(|| {
+            let mut zz_visitor = ToZzIdentMutVisitor;
+            let mut krate = string_to_crate(
+                "macro_rules! a {(b $c:expr $(d $e:token)f+ => \
+                (g $(d $d $e)+))} ".to_string());
+            zz_visitor.visit_crate(&mut krate);
+            assert_pred!(
+                matches_codepattern,
+                "matches_codepattern",
+                pprust::to_string(|s| fake_print_crate(s, &krate)),
+                "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
+        })
+    }
+}
+
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 914a0667ebf..b36ca0574cb 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -1,10 +1,12 @@
-use attr;
-use ast;
-use source_map::respan;
-use parse::{SeqSep, PResult};
-use parse::token::{self, Nonterminal, DelimToken};
-use parse::parser::{Parser, TokenType, PathStyle};
-use tokenstream::{TokenStream, TokenTree};
+use crate::attr;
+use crate::ast;
+use crate::source_map::respan;
+use crate::parse::{SeqSep, PResult};
+use crate::parse::token::{self, Nonterminal, DelimToken};
+use crate::parse::parser::{Parser, TokenType, PathStyle};
+use crate::tokenstream::{TokenStream, TokenTree};
+
+use log::debug;
 
 #[derive(Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -74,7 +76,7 @@ impl<'a> Parser<'a> {
     /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy`
     /// that prescribes how to handle inner attributes.
     fn parse_attribute_with_inner_parse_policy(&mut self,
-                                               inner_parse_policy: InnerAttributeParsePolicy)
+                                               inner_parse_policy: InnerAttributeParsePolicy<'_>)
                                                -> PResult<'a, ast::Attribute> {
         debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
                inner_parse_policy,
diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs
index a1cdfd9146a..b4103440e35 100644
--- a/src/libsyntax/parse/classify.rs
+++ b/src/libsyntax/parse/classify.rs
@@ -2,7 +2,7 @@
 
 // Predicates on exprs and stmts that the pretty-printer and parser use
 
-use ast;
+use crate::ast;
 
 /// Does this expression require a semicolon to be treated
 /// as a statement? The negation of this: 'can this expression
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index ffc480d829d..74fff3324ea 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -1,11 +1,13 @@
-pub use self::CommentStyle::*;
+pub use CommentStyle::*;
+
+use crate::ast;
+use crate::source_map::SourceMap;
+use crate::parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
+use crate::parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
+use crate::print::pprust;
 
-use ast;
-use source_map::SourceMap;
 use syntax_pos::{BytePos, CharPos, Pos, FileName};
-use parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
-use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
-use print::pprust;
+use log::debug;
 
 use std::io::Read;
 use std::usize;
@@ -135,7 +137,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
     panic!("not a doc-comment: {}", comment);
 }
 
-fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
+fn push_blank_line_comment(rdr: &StringReader<'_>, comments: &mut Vec<Comment>) {
     debug!(">>> blank-line comment");
     comments.push(Comment {
         style: BlankLine,
@@ -144,7 +146,10 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
     });
 }
 
-fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mut Vec<Comment>) {
+fn consume_whitespace_counting_blank_lines(
+    rdr: &mut StringReader<'_>,
+    comments: &mut Vec<Comment>
+) {
     while is_pattern_whitespace(rdr.ch) && !rdr.is_eof() {
         if rdr.ch_is('\n') {
             push_blank_line_comment(rdr, &mut *comments);
@@ -153,7 +158,7 @@ fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mu
     }
 }
 
-fn read_shebang_comment(rdr: &mut StringReader,
+fn read_shebang_comment(rdr: &mut StringReader<'_>,
                         code_to_the_left: bool,
                         comments: &mut Vec<Comment>) {
     debug!(">>> shebang comment");
@@ -166,7 +171,7 @@ fn read_shebang_comment(rdr: &mut StringReader,
     });
 }
 
-fn read_line_comments(rdr: &mut StringReader,
+fn read_line_comments(rdr: &mut StringReader<'_>,
                       code_to_the_left: bool,
                       comments: &mut Vec<Comment>) {
     debug!(">>> line comments");
@@ -192,9 +197,9 @@ fn read_line_comments(rdr: &mut StringReader,
     }
 }
 
-/// Returns None if the first col chars of s contain a non-whitespace char.
-/// Otherwise returns Some(k) where k is first char offset after that leading
-/// whitespace.  Note k may be outside bounds of s.
+/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
+/// Otherwise returns `Some(k)` where `k` is first char offset after that leading
+/// whitespace. Note that `k` may be outside bounds of `s`.
 fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
     let mut idx = 0;
     for (i, ch) in s.char_indices().take(col.to_usize()) {
@@ -222,7 +227,7 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String>, s: String, col:
     lines.push(s1);
 }
 
-fn read_block_comment(rdr: &mut StringReader,
+fn read_block_comment(rdr: &mut StringReader<'_>,
                       code_to_the_left: bool,
                       comments: &mut Vec<Comment>) {
     debug!(">>> block comment");
@@ -312,7 +317,7 @@ fn read_block_comment(rdr: &mut StringReader,
 }
 
 
-fn consume_comment(rdr: &mut StringReader,
+fn consume_comment(rdr: &mut StringReader<'_>,
                    comments: &mut Vec<Comment>,
                    code_to_the_left: &mut bool,
                    anything_to_the_left: &mut bool) {
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 06f9162a400..9168d4b61c1 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1,9 +1,10 @@
-use ast::{self, Ident};
+use crate::ast::{self, Ident};
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
+use crate::parse::{token, ParseSess};
+use crate::symbol::{Symbol, keywords};
+
 use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
-use source_map::{SourceMap, FilePathMapping};
-use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
-use parse::{token, ParseSess};
-use symbol::{Symbol, keywords};
 use core::unicode::property::Pattern_White_Space;
 
 use std::borrow::Cow;
@@ -11,6 +12,7 @@ use std::char;
 use std::iter;
 use std::mem::replace;
 use rustc_data_structures::sync::Lrc;
+use log::debug;
 
 pub mod comments;
 mod tokentrees;
@@ -31,6 +33,15 @@ impl Default for TokenAndSpan {
     }
 }
 
+#[derive(Clone, Debug)]
+pub struct UnmatchedBrace {
+    pub expected_delim: token::DelimToken,
+    pub found_delim: token::DelimToken,
+    pub found_span: Span,
+    pub unclosed_span: Option<Span>,
+    pub candidate_span: Option<Span>,
+}
+
 pub struct StringReader<'a> {
     pub sess: &'a ParseSess,
     /// The absolute offset within the source_map of the next character to read
@@ -56,6 +67,7 @@ pub struct StringReader<'a> {
     span_src_raw: Span,
     /// Stack of open delimiters and their spans. Used for error message.
     open_braces: Vec<(token::DelimToken, Span)>,
+    crate unmatched_braces: Vec<UnmatchedBrace>,
     /// The type and spans for all braces
     ///
     /// Used only for error recovery when arriving to EOF with mismatched braces.
@@ -100,7 +112,7 @@ impl<'a> StringReader<'a> {
         self.unwrap_or_abort(res)
     }
 
-    /// Return the next token. EFFECT: advances the string_reader.
+    /// Returns the next token, advancing the `StringReader` as a side effect.
     pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
         assert!(self.fatal_errs.is_empty());
         let ret_val = TokenAndSpan {
@@ -220,6 +232,7 @@ impl<'a> StringReader<'a> {
             span: syntax_pos::DUMMY_SP,
             span_src_raw: syntax_pos::DUMMY_SP,
             open_braces: Vec::new(),
+            unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
             override_span,
             last_unclosed_found_span: None,
@@ -412,7 +425,7 @@ impl<'a> StringReader<'a> {
         self.with_str_from_to(start, self.pos, f)
     }
 
-    /// Create a Name from a given offset to the current offset, each
+    /// Creates a Name from a given offset to the current offset, each
     /// adjusted 1 towards each other (assumes that on either side there is a
     /// single-byte delimiter).
     fn name_from(&self, start: BytePos) -> ast::Name {
@@ -449,7 +462,7 @@ impl<'a> StringReader<'a> {
         }
         return s.into();
 
-        fn translate_crlf_(rdr: &StringReader,
+        fn translate_crlf_(rdr: &StringReader<'_>,
                            start: BytePos,
                            s: &str,
                            mut j: usize,
@@ -657,7 +670,7 @@ impl<'a> StringReader<'a> {
     }
 
     /// If there is whitespace, shebang, or a comment, scan it. Otherwise,
-    /// return None.
+    /// return `None`.
     fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
         match self.ch.unwrap_or('\0') {
             // # to handle shebang at start of file -- this is the entry point
@@ -907,7 +920,7 @@ impl<'a> StringReader<'a> {
     /// in a byte, (non-raw) byte string, char, or (non-raw) string literal.
     /// `start` is the position of `first_source_char`, which is already consumed.
     ///
-    /// Returns true if there was a valid char/byte, false otherwise.
+    /// Returns `true` if there was a valid char/byte.
     fn scan_char_or_byte(&mut self,
                          start: BytePos,
                          first_source_char: char,
@@ -1139,7 +1152,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    /// Check that a base is valid for a floating literal, emitting a nice
+    /// Checks that a base is valid for a floating literal, emitting a nice
     /// error if it isn't.
     fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
         match base {
@@ -1172,7 +1185,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    /// Return the next token from the string, advances the input past that
+    /// Returns the next token from the string, advances the input past that
     /// token, and updates the interner
     fn next_token_inner(&mut self) -> Result<token::Token, ()> {
         let c = self.ch;
@@ -1866,19 +1879,20 @@ fn char_at(s: &str, byte: usize) -> char {
 mod tests {
     use super::*;
 
-    use ast::{Ident, CrateConfig};
-    use symbol::Symbol;
-    use syntax_pos::{BytePos, Span, NO_EXPANSION};
-    use source_map::SourceMap;
-    use errors;
-    use feature_gate::UnstableFeatures;
-    use parse::token;
+    use crate::ast::{Ident, CrateConfig};
+    use crate::symbol::Symbol;
+    use crate::source_map::SourceMap;
+    use crate::errors;
+    use crate::feature_gate::UnstableFeatures;
+    use crate::parse::token;
+    use crate::diagnostics::plugin::ErrorMap;
+    use crate::with_globals;
     use std::io;
     use std::path::PathBuf;
-    use diagnostics::plugin::ErrorMap;
+    use syntax_pos::{BytePos, Span, NO_EXPANSION};
     use rustc_data_structures::fx::FxHashSet;
     use rustc_data_structures::sync::Lock;
-    use with_globals;
+
     fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
         let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
                                                           Some(sm.clone()),
@@ -1943,7 +1957,7 @@ mod tests {
 
     // check that the given reader produces the desired stream
     // of tokens (stop checking after exhausting the expected vec)
-    fn check_tokenization(mut string_reader: StringReader, expected: Vec<token::Token>) {
+    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<token::Token>) {
         for expected_tok in &expected {
             assert_eq!(&string_reader.next_token().tok, expected_tok);
         }
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index d219f29f06c..0db36c84cdf 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,7 +1,7 @@
-use print::pprust::token_to_string;
-use parse::lexer::StringReader;
-use parse::{token, PResult};
-use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
+use crate::print::pprust::token_to_string;
+use crate::parse::lexer::{StringReader, UnmatchedBrace};
+use crate::parse::{token, PResult};
+use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -101,38 +101,38 @@ impl<'a> StringReader<'a> {
                     }
                     // Incorrect delimiter.
                     token::CloseDelim(other) => {
-                        let token_str = token_to_string(&self.token);
+                        let mut unclosed_delimiter = None;
+                        let mut candidate = None;
                         if self.last_unclosed_found_span != Some(self.span) {
                             // do not complain about the same unclosed delimiter multiple times
                             self.last_unclosed_found_span = Some(self.span);
-                            let msg = format!("incorrect close delimiter: `{}`", token_str);
-                            let mut err = self.sess.span_diagnostic.struct_span_err(
-                                self.span,
-                                &msg,
-                            );
-                            err.span_label(self.span, "incorrect close delimiter");
                             // This is a conservative error: only report the last unclosed
                             // delimiter. The previous unclosed delimiters could actually be
                             // closed! The parser just hasn't gotten to them yet.
                             if let Some(&(_, sp)) = self.open_braces.last() {
-                                err.span_label(sp, "un-closed delimiter");
+                                unclosed_delimiter = Some(sp);
                             };
                             if let Some(current_padding) = sm.span_to_margin(self.span) {
                                 for (brace, brace_span) in &self.open_braces {
                                     if let Some(padding) = sm.span_to_margin(*brace_span) {
                                         // high likelihood of these two corresponding
                                         if current_padding == padding && brace == &other {
-                                            err.span_label(
-                                                *brace_span,
-                                                "close delimiter possibly meant for this",
-                                            );
+                                            candidate = Some(*brace_span);
                                         }
                                     }
                                 }
                             }
-                            err.emit();
+                            let (tok, _) = self.open_braces.pop().unwrap();
+                            self.unmatched_braces.push(UnmatchedBrace {
+                                expected_delim: tok,
+                                found_delim: other,
+                                found_span: self.span,
+                                unclosed_span: unclosed_delimiter,
+                                candidate_span: candidate,
+                            });
+                        } else {
+                            self.open_braces.pop();
                         }
-                        self.open_braces.pop().unwrap();
 
                         // If the incorrect delimiter matches an earlier opening
                         // delimiter, then don't consume it (it can be used to
diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs
index 7da4284c0e4..75862178169 100644
--- a/src/libsyntax/parse/lexer/unicode_chars.rs
+++ b/src/libsyntax/parse/lexer/unicode_chars.rs
@@ -2,7 +2,7 @@
 // http://www.unicode.org/Public/security/10.0.0/confusables.txt
 
 use syntax_pos::{Span, NO_EXPANSION};
-use errors::{Applicability, DiagnosticBuilder};
+use crate::errors::{Applicability, DiagnosticBuilder};
 use super::StringReader;
 
 const UNICODE_ARRAY: &[(char, &str, char)] = &[
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index c7330004d6d..69940ae621c 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -1,16 +1,19 @@
-//! The main parser interface
+//! The main parser interface.
+
+use crate::ast::{self, CrateConfig, NodeId};
+use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
+use crate::feature_gate::UnstableFeatures;
+use crate::parse::parser::Parser;
+use crate::symbol::Symbol;
+use crate::tokenstream::{TokenStream, TokenTree};
+use crate::diagnostics::plugin::ErrorMap;
+use crate::print::pprust::token_to_string;
 
 use rustc_data_structures::sync::{Lrc, Lock};
-use ast::{self, CrateConfig, NodeId};
-use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
-use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
-use feature_gate::UnstableFeatures;
-use parse::parser::Parser;
-use symbol::Symbol;
-use tokenstream::{TokenStream, TokenTree};
-use diagnostics::plugin::ErrorMap;
+use log::debug;
 
 use rustc_data_structures::fx::FxHashSet;
 use std::borrow::Cow;
@@ -35,12 +38,11 @@ pub struct ParseSess {
     pub unstable_features: UnstableFeatures,
     pub config: CrateConfig,
     pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
-    /// Places where raw identifiers were used. This is used for feature gating
-    /// raw identifiers
+    /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
     pub raw_identifier_spans: Lock<Vec<Span>>,
-    /// The registered diagnostics codes
+    /// The registered diagnostics codes.
     crate registered_diagnostics: Lock<ErrorMap>,
-    /// Used to determine and report recursive mod inclusions
+    /// Used to determine and report recursive module inclusions.
     included_mod_stack: Lock<Vec<PathBuf>>,
     source_map: Lrc<SourceMap>,
     pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
@@ -125,31 +127,33 @@ pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess)
 }
 
 pub fn parse_crate_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                       -> PResult<ast::Crate> {
+                                       -> PResult<'_, ast::Crate> {
     new_parser_from_source_str(sess, name, source).parse_crate_mod()
 }
 
 pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                             -> PResult<Vec<ast::Attribute>> {
+                                             -> PResult<'_, Vec<ast::Attribute>> {
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
-pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess,
-                                    override_span: Option<Span>)
-                                    -> TokenStream {
+pub fn parse_stream_from_source_str(
+    name: FileName,
+    source: String,
+    sess: &ParseSess,
+    override_span: Option<Span>,
+) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
-/// Create a new parser from a source string
-pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-                                      -> Parser {
+/// Creates a new parser from a source string.
+pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
 }
 
-/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
 /// token stream.
 pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-    -> Result<Parser, Vec<Diagnostic>>
+    -> Result<Parser<'_>, Vec<Diagnostic>>
 {
     let mut parser = maybe_source_file_to_parser(sess,
                                                  sess.source_map().new_source_file(name, source))?;
@@ -157,13 +161,13 @@ pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source
     Ok(parser)
 }
 
-/// Create a new parser, handling errors as appropriate
+/// Creates a new parser, handling errors as appropriate
 /// if the file doesn't exist
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
     source_file_to_parser(sess, file_to_source_file(sess, path, None))
 }
 
-/// Create a new parser, returning buffered diagnostics if the file doesn't
+/// Creates a new parser, returning buffered diagnostics if the file doesn't
 /// exist or from lexing the initial token stream.
 pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path)
     -> Result<Parser<'a>, Vec<Diagnostic>> {
@@ -186,19 +190,21 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 }
 
 /// Given a source_file and config, return a parser
-fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
+fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic,
                      maybe_source_file_to_parser(sess, source_file))
 }
 
 /// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
 /// initial token stream.
-fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
-    -> Result<Parser, Vec<Diagnostic>>
-{
+fn maybe_source_file_to_parser(
+    sess: &ParseSess,
+    source_file: Lrc<SourceFile>,
+) -> Result<Parser<'_>, Vec<Diagnostic>> {
     let end_pos = source_file.end_pos;
-    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
-
+    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
+    let mut parser = stream_to_parser(sess, stream);
+    parser.unclosed_delims = unclosed_delims;
     if parser.token == token::Eof && parser.span.is_dummy() {
         parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
     }
@@ -208,7 +214,7 @@ fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
+pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
     stream_to_parser(sess, tts.into_iter().collect())
 }
 
@@ -232,7 +238,7 @@ fn try_file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a session and a path and an optional span (for error reporting),
-/// add the path to the session's source_map and return the new source_file.
+/// add the path to the session's `source_map` and return the new `source_file`.
 fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                    -> Lrc<SourceFile> {
     match try_file_to_source_file(sess, path, spanopt) {
@@ -244,37 +250,56 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     }
 }
 
-/// Given a source_file, produce a sequence of token-trees
-pub fn source_file_to_stream(sess: &ParseSess,
-                             source_file: Lrc<SourceFile>,
-                             override_span: Option<Span>) -> TokenStream {
+/// Given a source_file, produces a sequence of token trees.
+pub fn source_file_to_stream(
+    sess: &ParseSess,
+    source_file: Lrc<SourceFile>,
+    override_span: Option<Span>,
+) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
     panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
 }
 
-/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
 /// parsing the token tream.
-pub fn maybe_file_to_stream(sess: &ParseSess,
-                            source_file: Lrc<SourceFile>,
-                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+pub fn maybe_file_to_stream(
+    sess: &ParseSess,
+    source_file: Lrc<SourceFile>,
+    override_span: Option<Span>,
+) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
     let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
     srdr.real_token();
 
     match srdr.parse_all_token_trees() {
-        Ok(stream) => Ok(stream),
+        Ok(stream) => Ok((stream, srdr.unmatched_braces)),
         Err(err) => {
             let mut buffer = Vec::with_capacity(1);
             err.buffer(&mut buffer);
+            // Not using `emit_unclosed_delims` here because the diagnostics must be buffered via `db.buffer` rather than emitted directly.
+            for unmatched in srdr.unmatched_braces {
+                let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
+                    "incorrect close delimiter: `{}`",
+                    token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+                ));
+                db.span_label(unmatched.found_span, "incorrect close delimiter");
+                if let Some(sp) = unmatched.candidate_span {
+                    db.span_label(sp, "close delimiter possibly meant for this");
+                }
+                if let Some(sp) = unmatched.unclosed_span {
+                    db.span_label(sp, "un-closed delimiter");
+                }
+                db.buffer(&mut buffer);
+            }
             Err(buffer)
         }
     }
 }
 
-/// Given stream and the `ParseSess`, produce a parser
-pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
+/// Given stream and the `ParseSess`, produces a parser.
+pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
     Parser::new(sess, stream, None, true, false)
 }
 
-/// Parse a string representing a character literal into its final form.
+/// Parses a string representing a character literal into its final form.
 /// Rather than just accepting/rejecting a given literal, unescapes it as
 /// well. Can take any slice prefixed by a character escape. Returns the
 /// character and the number of characters consumed.
@@ -333,15 +358,14 @@ fn char_lit(lit: &str, diag: Option<(Span, &Handler)>) -> (char, isize) {
     }
 }
 
-/// Parse a string representing a string literal into its final form. Does
-/// unescaping.
+/// Parses a string representing a string literal into its final form. Does unescaping.
 pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
     debug!("str_lit: given {}", lit.escape_default());
     let mut res = String::with_capacity(lit.len());
 
     let error = |i| format!("lexer should have rejected {} at {}", lit, i);
 
-    /// Eat everything up to a non-whitespace
+    /// Eat everything up to a non-whitespace.
     fn eat<'a>(it: &mut iter::Peekable<str::CharIndices<'a>>) {
         loop {
             match it.peek().map(|x| x.1) {
@@ -402,7 +426,7 @@ pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
     res
 }
 
-/// Parse a string representing a raw string literal into its final form. The
+/// Parses a string representing a raw string literal into its final form. The
 /// only operation this does is convert embedded CRLF into a single LF.
 fn raw_str_lit(lit: &str) -> String {
     debug!("raw_str_lit: given {}", lit.escape_default());
@@ -528,7 +552,7 @@ fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
     filtered_float_lit(Symbol::intern(s), suffix, diag)
 }
 
-/// Parse a string representing a byte literal into its final form. Similar to `char_lit`
+/// Parses a string representing a byte literal into its final form. Similar to `char_lit`.
 fn byte_lit(lit: &str) -> (u8, usize) {
     let err = |i| format!("lexer accepted invalid byte literal {} step {}", lit, i);
 
@@ -565,7 +589,7 @@ fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> {
 
     let error = |i| panic!("lexer should have rejected {} at {}", lit, i);
 
-    /// Eat everything up to a non-whitespace
+    /// Eat everything up to a non-whitespace.
     fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
         loop {
             match it.peek().map(|x| x.1) {
@@ -732,10 +756,11 @@ fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
     })
 }
 
-/// `SeqSep` : a sequence separator (token)
-/// and whether a trailing separator is allowed.
+/// A sequence separator.
 pub struct SeqSep {
+    /// The separator token.
     pub sep: Option<token::Token>,
+    /// `true` if a trailing separator is allowed.
     pub trailing_sep_allowed: bool,
 }
 
@@ -758,22 +783,22 @@ impl SeqSep {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use crate::ast::{self, Ident, PatKind};
+    use crate::attr::first_attr_value_str_by_name;
+    use crate::ptr::P;
+    use crate::print::pprust::item_to_string;
+    use crate::tokenstream::{DelimSpan, TokenTree};
+    use crate::util::parser_testing::string_to_stream;
+    use crate::util::parser_testing::{string_to_expr, string_to_item};
+    use crate::with_globals;
     use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
-    use ast::{self, Ident, PatKind};
-    use attr::first_attr_value_str_by_name;
-    use ptr::P;
-    use print::pprust::item_to_string;
-    use tokenstream::{DelimSpan, TokenTree};
-    use util::parser_testing::string_to_stream;
-    use util::parser_testing::{string_to_expr, string_to_item};
-    use with_globals;
 
     /// Parses an item.
     ///
     /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
     /// when a syntax error occurred.
     fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                        -> PResult<Option<P<ast::Item>>> {
+                                        -> PResult<'_, Option<P<ast::Item>>> {
         new_parser_from_source_str(sess, name, source).parse_item()
     }
 
@@ -913,20 +938,20 @@ mod tests {
         struct PatIdentVisitor {
             spans: Vec<Span>
         }
-        impl<'a> ::visit::Visitor<'a> for PatIdentVisitor {
+        impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor {
             fn visit_pat(&mut self, p: &'a ast::Pat) {
                 match p.node {
                     PatKind::Ident(_ , ref spannedident, _) => {
                         self.spans.push(spannedident.span.clone());
                     }
                     _ => {
-                        ::visit::walk_pat(self, p);
+                        crate::visit::walk_pat(self, p);
                     }
                 }
             }
         }
         let mut v = PatIdentVisitor { spans: Vec::new() };
-        ::visit::walk_item(&mut v, &item);
+        crate::visit::walk_item(&mut v, &item);
         return v.spans;
     }
 
@@ -1007,7 +1032,7 @@ mod tests {
     fn ttdelim_span() {
         fn parse_expr_from_source_str(
             name: FileName, source: String, sess: &ParseSess
-        ) -> PResult<P<ast::Expr>> {
+        ) -> PResult<'_, P<ast::Expr>> {
             new_parser_from_source_str(sess, name, source).parse_expr()
         }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 514b2952c50..67154305735 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -1,53 +1,55 @@
+use crate::ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
+use crate::ast::{GenericBound, TraitBoundModifier};
+use crate::ast::Unsafety;
+use crate::ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
+use crate::ast::Block;
+use crate::ast::{BlockCheckMode, CaptureBy, Movability};
+use crate::ast::{Constness, Crate};
+use crate::ast::Defaultness;
+use crate::ast::EnumDef;
+use crate::ast::{Expr, ExprKind, RangeLimits};
+use crate::ast::{Field, FnDecl, FnHeader};
+use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
+use crate::ast::{GenericParam, GenericParamKind};
+use crate::ast::GenericArg;
+use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
+use crate::ast::{Label, Lifetime, Lit, LitKind};
+use crate::ast::Local;
+use crate::ast::MacStmtStyle;
+use crate::ast::{Mac, Mac_, MacDelimiter};
+use crate::ast::{MutTy, Mutability};
+use crate::ast::{Pat, PatKind, PathSegment};
+use crate::ast::{PolyTraitRef, QSelf};
+use crate::ast::{Stmt, StmtKind};
+use crate::ast::{VariantData, StructField};
+use crate::ast::StrStyle;
+use crate::ast::SelfKind;
+use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax};
+use crate::ast::{Ty, TyKind, TypeBinding, GenericBounds};
+use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
+use crate::ast::{UseTree, UseTreeKind};
+use crate::ast::{BinOpKind, UnOp};
+use crate::ast::{RangeEnd, RangeSyntax};
+use crate::{ast, attr};
+use crate::ext::base::DummyResult;
+use crate::source_map::{self, SourceMap, Spanned, respan};
+use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
+use crate::parse::{self, SeqSep, classify, token};
+use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
+use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use crate::parse::token::DelimToken;
+use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
+use crate::util::parser::{AssocOp, Fixity};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::parse::PResult;
+use crate::ThinVec;
+use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
+use crate::symbol::{Symbol, keywords};
+
 use rustc_target::spec::abi::{self, Abi};
-use ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
-use ast::{GenericBound, TraitBoundModifier};
-use ast::Unsafety;
-use ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
-use ast::Block;
-use ast::{BlockCheckMode, CaptureBy, Movability};
-use ast::{Constness, Crate};
-use ast::Defaultness;
-use ast::EnumDef;
-use ast::{Expr, ExprKind, RangeLimits};
-use ast::{Field, FnDecl, FnHeader};
-use ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
-use ast::{GenericParam, GenericParamKind};
-use ast::GenericArg;
-use ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
-use ast::{Label, Lifetime, Lit, LitKind};
-use ast::Local;
-use ast::MacStmtStyle;
-use ast::{Mac, Mac_, MacDelimiter};
-use ast::{MutTy, Mutability};
-use ast::{Pat, PatKind, PathSegment};
-use ast::{PolyTraitRef, QSelf};
-use ast::{Stmt, StmtKind};
-use ast::{VariantData, StructField};
-use ast::StrStyle;
-use ast::SelfKind;
-use ast::{TraitItem, TraitRef, TraitObjectSyntax};
-use ast::{Ty, TyKind, TypeBinding, GenericBounds};
-use ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
-use ast::{UseTree, UseTreeKind};
-use ast::{BinOpKind, UnOp};
-use ast::{RangeEnd, RangeSyntax};
-use {ast, attr};
-use ext::base::DummyResult;
-use source_map::{self, SourceMap, Spanned, respan};
 use syntax_pos::{self, Span, MultiSpan, BytePos, FileName};
-use errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
-use parse::{self, SeqSep, classify, token};
-use parse::lexer::TokenAndSpan;
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use parse::token::DelimToken;
-use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
-use util::parser::{AssocOp, Fixity};
-use print::pprust;
-use ptr::P;
-use parse::PResult;
-use ThinVec;
-use tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
-use symbol::{Symbol, keywords};
+use log::{debug, trace};
 
 use std::borrow::Cow;
 use std::cmp;
@@ -64,7 +66,7 @@ pub enum AliasKind {
     Existential(GenericBounds),
 }
 
-bitflags! {
+bitflags::bitflags! {
     struct Restrictions: u8 {
         const STMT_EXPR         = 1 << 0;
         const NO_STRUCT_LITERAL = 1 << 1;
@@ -73,7 +75,7 @@ bitflags! {
 
 type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);
 
-/// How to parse a path.
+/// Specifies how to parse a path.
 #[derive(Copy, Clone, PartialEq)]
 pub enum PathStyle {
     /// In some contexts, notably in expressions, paths with generic arguments are ambiguous
@@ -109,7 +111,7 @@ enum BlockMode {
     Ignore,
 }
 
-/// Possibly accept an `token::Interpolated` expression (a pre-parsed expression
+/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
 /// dropped into the token stream, which happens while parsing the result of
 /// macro expansion). Placement of these is not as complex as I feared it would
 /// be. The important thing is to make sure that lookahead doesn't balk at
@@ -249,6 +251,11 @@ pub struct Parser<'a> {
     ///
     /// See the comments in the `parse_path_segment` function for more details.
     crate unmatched_angle_bracket_count: u32,
+    crate max_angle_bracket_count: u32,
+    /// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
+    /// it gets removed from here. Every entry left at the end gets emitted as an independent
+    /// error.
+    crate unclosed_delims: Vec<UnmatchedBrace>,
 }
 
 
@@ -395,6 +402,7 @@ crate enum TokenType {
     Ident,
     Path,
     Type,
+    Const,
 }
 
 impl TokenType {
@@ -407,15 +415,16 @@ impl TokenType {
             TokenType::Ident => "identifier".to_string(),
             TokenType::Path => "path".to_string(),
             TokenType::Type => "type".to_string(),
+            TokenType::Const => "const".to_string(),
         }
     }
 }
 
-/// Returns true if `IDENT t` can start a type - `IDENT::a::b`, `IDENT<u8, u8>`,
+/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
 /// `IDENT<<u8 as Trait>::AssocTy>`.
 ///
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
-/// that IDENT is not the ident of a fn trait
+/// that `IDENT` is not the ident of a fn trait.
 fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
     t == &token::ModSep || t == &token::Lt ||
     t == &token::BinOp(token::Shl)
@@ -453,7 +462,7 @@ pub enum Error {
 impl Error {
     fn span_err<S: Into<MultiSpan>>(self,
                                         sp: S,
-                                        handler: &errors::Handler) -> DiagnosticBuilder {
+                                        handler: &errors::Handler) -> DiagnosticBuilder<'_> {
         match self {
             Error::FileNotFoundForModule { ref mod_name,
                                            ref default_path,
@@ -516,7 +525,7 @@ impl From<P<Expr>> for LhsExpr {
     }
 }
 
-/// Create a placeholder argument.
+/// Creates a placeholder argument.
 fn dummy_arg(span: Span) -> Arg {
     let ident = Ident::new(keywords::Invalid.name(), span);
     let pat = P(Pat {
@@ -571,6 +580,8 @@ impl<'a> Parser<'a> {
             desugar_doc_comments,
             cfg_mods: true,
             unmatched_angle_bracket_count: 0,
+            max_angle_bracket_count: 0,
+            unclosed_delims: Vec::new(),
         };
 
         let tok = parser.next_tok();
@@ -603,7 +614,7 @@ impl<'a> Parser<'a> {
         next
     }
 
-    /// Convert the current token to a string using self's reader
+    /// Converts the current token to a string using `self`'s reader.
     pub fn this_token_to_string(&self) -> String {
         pprust::token_to_string(&self.token)
     }
@@ -638,13 +649,12 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume the token t. Signal an error if
-    /// the next token is not t.
-    pub fn expect(&mut self, t: &token::Token) -> PResult<'a,  ()> {
+    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
+    pub fn expect(&mut self, t: &token::Token) -> PResult<'a,  bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
-                Ok(())
+                Ok(false)
             } else {
                 let token_str = pprust::token_to_string(t);
                 let this_token_str = self.this_token_descr();
@@ -659,6 +669,12 @@ impl<'a> Parser<'a> {
                     self.sess.source_map().next_point(self.prev_span)
                 };
                 let label_exp = format!("expected `{}`", token_str);
+                match self.recover_closing_delimiter(&[t.clone()], err) {
+                    Err(e) => err = e,
+                    Ok(recovered) => {
+                        return Ok(recovered);
+                    }
+                }
                 let cm = self.sess.source_map();
                 match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
                     (Ok(ref a), Ok(ref b)) if a.line == b.line => {
@@ -678,12 +694,64 @@ impl<'a> Parser<'a> {
         }
     }
 
+    fn recover_closing_delimiter(
+        &mut self,
+        tokens: &[token::Token],
+        mut err: DiagnosticBuilder<'a>,
+    ) -> PResult<'a, bool> {
+        let mut pos = None;
+        // we want to use the last closing delim that would apply
+        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
+            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
+                && Some(self.span) > unmatched.unclosed_span
+            {
+                pos = Some(i);
+            }
+        }
+        match pos {
+            Some(pos) => {
+                // Recover and assume that the detected unclosed delimiter was meant for
+                // this location. Emit the diagnostic and act as if the delimiter was
+                // present for the parser's sake.
+
+                 // Don't attempt to recover from this unclosed delimiter more than once.
+                let unmatched = self.unclosed_delims.remove(pos);
+                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
+
+                 // We want to suggest the inclusion of the closing delimiter where it makes
+                // the most sense, which is immediately after the last token:
+                //
+                //  {foo(bar {}}
+                //      -      ^
+                //      |      |
+                //      |      help: `)` may belong here (FIXME: #58270)
+                //      |
+                //      unclosed delimiter
+                if let Some(sp) = unmatched.unclosed_span {
+                    err.span_label(sp, "unclosed delimiter");
+                }
+                err.span_suggestion_short(
+                    self.sess.source_map().next_point(self.prev_span),
+                    &format!("{} may belong here", delim.to_string()),
+                    delim.to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit();
+                self.expected_tokens.clear();  // reduce errors
+                Ok(true)
+            }
+            _ => Err(err),
+        }
+    }
+
     /// Expect next token to be edible or inedible token.  If edible,
     /// then consume it; if inedible, then return without consuming
     /// anything.  Signal a fatal error if next token is unexpected.
-    pub fn expect_one_of(&mut self,
-                         edible: &[token::Token],
-                         inedible: &[token::Token]) -> PResult<'a,  ()>{
+    pub fn expect_one_of(
+        &mut self,
+        edible: &[token::Token],
+        inedible: &[token::Token],
+    ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
             // This might be a sign we need a connect method on Iterator.
@@ -703,10 +771,10 @@ impl<'a> Parser<'a> {
         }
         if edible.contains(&self.token) {
             self.bump();
-            Ok(())
+            Ok(false)
         } else if inedible.contains(&self.token) {
             // leave it in the input
-            Ok(())
+            Ok(false)
         } else {
             let mut expected = edible.iter()
                 .map(|x| TokenType::Token(x.clone()))
@@ -757,6 +825,15 @@ impl<'a> Parser<'a> {
             } else {
                 label_sp
             };
+            match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
+                TokenType::Token(t) => Some(t.clone()),
+                _ => None,
+            }).collect::<Vec<_>>(), err) {
+                Err(e) => err = e,
+                Ok(recovered) => {
+                    return Ok(recovered);
+                }
+            }
 
             let cm = self.sess.source_map();
             match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
@@ -789,7 +866,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// returns the span of expr, if it was not interpolated or the span of the interpolated token
+    /// Returns the span of expr, if it was not interpolated or the span of the interpolated token.
     fn interpolated_or_expr_span(&self,
                                  expr: PResult<'a, P<Expr>>)
                                  -> PResult<'a, (Span, P<Expr>)> {
@@ -863,7 +940,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Check if the next token is `tok`, and return `true` if so.
+    /// Checks if the next token is `tok`, and returns `true` if so.
     ///
     /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
     /// encountered.
@@ -873,8 +950,7 @@ impl<'a> Parser<'a> {
         is_present
     }
 
-    /// Consume token 'tok' if it exists. Returns true if the given
-    /// token was present, false otherwise.
+    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
     pub fn eat(&mut self, tok: &token::Token) -> bool {
         let is_present = self.check(tok);
         if is_present { self.bump() }
@@ -886,8 +962,8 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw)
     }
 
-    /// If the next token is the given keyword, eat it and return
-    /// true. Otherwise, return false.
+    /// If the next token is the given keyword, eats it and returns
+    /// `true`. Otherwise, returns `false`.
     pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
         if self.check_keyword(kw) {
             self.bump();
@@ -906,9 +982,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// If the given word is not a keyword, signal an error.
-    /// If the next token is not the given word, signal an error.
-    /// Otherwise, eat it.
+    /// If the given word is not a keyword, signals an error.
+    /// If the next token is not the given word, signals an error.
+    /// Otherwise, eats it.
     fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
         if !self.eat_keyword(kw) {
             self.unexpected()
@@ -944,11 +1020,20 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume a `+`. if `+=` is seen, replace it with a `=`
-    /// and continue. If a `+` is not seen, return false.
+    fn check_const_arg(&mut self) -> bool {
+        if self.token.can_begin_const_arg() {
+            true
+        } else {
+            self.expected_tokens.push(TokenType::Const);
+            false
+        }
+    }
+
+    /// Expects and consumes a `+`. If `+=` is seen, replaces it with a `=`
+    /// and continues. If a `+` is not seen, returns `false`.
     ///
-    /// This is using when token splitting += into +.
-    /// See issue 47856 for an example of when this may occur.
+    /// This is used when token-splitting `+=` into `+`.
+    /// See issue #47856 for an example of when this may occur.
     fn eat_plus(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
         match self.token {
@@ -967,7 +1052,7 @@ impl<'a> Parser<'a> {
 
 
     /// Checks to see if the next token is either `+` or `+=`.
-    /// Otherwise returns false.
+    /// Otherwise returns `false`.
     fn check_plus(&mut self) -> bool {
         if self.token.is_like_plus() {
             true
@@ -978,8 +1063,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume an `&`. If `&&` is seen, replace it with a single
-    /// `&` and continue. If an `&` is not seen, signal an error.
+    /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single
+    /// `&` and continues. If an `&` is not seen, signals an error.
     fn expect_and(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
         match self.token {
@@ -995,8 +1080,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume an `|`. If `||` is seen, replace it with a single
-    /// `|` and continue. If an `|` is not seen, signal an error.
+    /// Expects and consumes an `|`. If `||` is seen, replaces it with a single
+    /// `|` and continues. If an `|` is not seen, signals an error.
     fn expect_or(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
         match self.token {
@@ -1028,8 +1113,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Attempt to consume a `<`. If `<<` is seen, replace it with a single
-    /// `<` and continue. If a `<` is not seen, return false.
+    /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
+    /// `<` and continues. If `<-` is seen, consumes the `<` and leaves a `-`
+    /// in its place, then continues. If a `<` is not seen, returns `false`.
     ///
     /// This is meant to be used when parsing generics on a path to get the
     /// starting token.
@@ -1045,12 +1131,18 @@ impl<'a> Parser<'a> {
                 self.bump_with(token::Lt, span);
                 true
             }
+            token::LArrow => {
+                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                self.bump_with(token::BinOp(token::Minus), span);
+                true
+            }
             _ => false,
         };
 
         if ate {
             // See doc comment for `unmatched_angle_bracket_count`.
             self.unmatched_angle_bracket_count += 1;
+            self.max_angle_bracket_count += 1;
             debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
         }
 
@@ -1065,9 +1157,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume a GT. if a >> is seen, replace it
-    /// with a single > and continue. If a GT is not seen,
-    /// signal an error.
+    /// Expects and consumes a single `>` token. If a `>>` is seen, replaces it
+    /// with a single `>` and continues. If a `>` is not seen, signals an error.
     fn expect_gt(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::Gt));
         let ate = match self.token {
@@ -1091,18 +1182,18 @@ impl<'a> Parser<'a> {
         };
 
         match ate {
-            Some(x) => {
+            Some(_) => {
                 // See doc comment for `unmatched_angle_bracket_count`.
                 self.unmatched_angle_bracket_count -= 1;
                 debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
 
-                Ok(x)
+                Ok(())
             },
             None => self.unexpected(),
         }
     }
 
-    /// Eat and discard tokens until one of `kets` is encountered. Respects token trees,
+    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
     fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
         let handler = self.diagnostic();
@@ -1115,8 +1206,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse a sequence, including the closing delimiter. The function
-    /// f must consume tokens until reaching the next separator or
+    /// Parses a sequence, including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     pub fn parse_seq_to_end<T, F>(&mut self,
                                   ket: &token::Token,
@@ -1125,19 +1216,22 @@ impl<'a> Parser<'a> {
                                   -> PResult<'a, Vec<T>> where
         F: FnMut(&mut Parser<'a>) -> PResult<'a,  T>,
     {
-        let val = self.parse_seq_to_before_end(ket, sep, f)?;
-        self.bump();
+        let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+        if !recovered {
+            self.bump();
+        }
         Ok(val)
     }
 
-    /// Parse a sequence, not including the closing delimiter. The function
-    /// f must consume tokens until reaching the next separator or
+    /// Parses a sequence, not including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
-    pub fn parse_seq_to_before_end<T, F>(&mut self,
-                                         ket: &token::Token,
-                                         sep: SeqSep,
-                                         f: F)
-                                         -> PResult<'a, Vec<T>>
+    pub fn parse_seq_to_before_end<T, F>(
+        &mut self,
+        ket: &token::Token,
+        sep: SeqSep,
+        f: F,
+    ) -> PResult<'a, (Vec<T>, bool)>
         where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
     {
         self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
@@ -1149,10 +1243,11 @@ impl<'a> Parser<'a> {
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: F,
-    ) -> PResult<'a, Vec<T>>
+    ) -> PResult<'a, (Vec<T>, bool /* recovered */)>
         where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
     {
-        let mut first: bool = true;
+        let mut first = true;
+        let mut recovered = false;
         let mut v = vec![];
         while !kets.iter().any(|k| {
                 match expect {
@@ -1168,23 +1263,30 @@ impl<'a> Parser<'a> {
                 if first {
                     first = false;
                 } else {
-                    if let Err(mut e) = self.expect(t) {
-                        // Attempt to keep parsing if it was a similar separator
-                        if let Some(ref tokens) = t.similar_tokens() {
-                            if tokens.contains(&self.token) {
-                                self.bump();
-                            }
+                    match self.expect(t) {
+                        Ok(false) => {}
+                        Ok(true) => {
+                            recovered = true;
+                            break;
                         }
-                        e.emit();
-                        // Attempt to keep parsing if it was an omitted separator
-                        match f(self) {
-                            Ok(t) => {
-                                v.push(t);
-                                continue;
-                            },
-                            Err(mut e) => {
-                                e.cancel();
-                                break;
+                        Err(mut e) => {
+                            // Attempt to keep parsing if it was a similar separator
+                            if let Some(ref tokens) = t.similar_tokens() {
+                                if tokens.contains(&self.token) {
+                                    self.bump();
+                                }
+                            }
+                            e.emit();
+                            // Attempt to keep parsing if it was an omitted separator
+                            match f(self) {
+                                Ok(t) => {
+                                    v.push(t);
+                                    continue;
+                                },
+                                Err(mut e) => {
+                                    e.cancel();
+                                    break;
+                                }
                             }
                         }
                     }
@@ -1203,23 +1305,26 @@ impl<'a> Parser<'a> {
             v.push(t);
         }
 
-        Ok(v)
+        Ok((v, recovered))
     }
 
-    /// Parse a sequence, including the closing delimiter. The function
-    /// f must consume tokens until reaching the next separator or
+    /// Parses a sequence, including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
-    fn parse_unspanned_seq<T, F>(&mut self,
-                                     bra: &token::Token,
-                                     ket: &token::Token,
-                                     sep: SeqSep,
-                                     f: F)
-                                     -> PResult<'a, Vec<T>> where
+    fn parse_unspanned_seq<T, F>(
+        &mut self,
+        bra: &token::Token,
+        ket: &token::Token,
+        sep: SeqSep,
+        f: F,
+    ) -> PResult<'a, Vec<T>> where
         F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     {
         self.expect(bra)?;
-        let result = self.parse_seq_to_before_end(ket, sep, f)?;
-        self.eat(ket);
+        let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+        if !recovered {
+            self.eat(ket);
+        }
         Ok(result)
     }
 
@@ -1313,7 +1418,7 @@ impl<'a> Parser<'a> {
         self.sess.span_diagnostic.span_bug(sp, m)
     }
 
-    fn cancel(&self, err: &mut DiagnosticBuilder) {
+    fn cancel(&self, err: &mut DiagnosticBuilder<'_>) {
         self.sess.span_diagnostic.cancel(err)
     }
 
@@ -1321,15 +1426,14 @@ impl<'a> Parser<'a> {
         &self.sess.span_diagnostic
     }
 
-    /// Is the current token one of the keywords that signals a bare function
-    /// type?
+    /// Is the current token one of the keywords that signals a bare function type?
     fn token_is_bare_fn_keyword(&mut self) -> bool {
         self.check_keyword(keywords::Fn) ||
             self.check_keyword(keywords::Unsafe) ||
             self.check_keyword(keywords::Extern)
     }
 
-    /// parse a `TyKind::BareFn` type:
+    /// Parses a `TyKind::BareFn` type.
     fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
         /*
 
@@ -1366,7 +1470,7 @@ impl<'a> Parser<'a> {
         })))
     }
 
-    /// Parse asyncness: `async` or nothing
+    /// Parses asyncness: `async` or nothing.
     fn parse_asyncness(&mut self) -> IsAsync {
         if self.eat_keyword(keywords::Async) {
             IsAsync::Async {
@@ -1378,7 +1482,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse unsafety: `unsafe` or nothing.
+    /// Parses unsafety: `unsafe` or nothing.
     fn parse_unsafety(&mut self) -> Unsafety {
         if self.eat_keyword(keywords::Unsafe) {
             Unsafety::Unsafe
@@ -1387,7 +1491,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse the items in a trait declaration
+    /// Parses the items in a trait declaration.
     pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
         maybe_whole!(self, NtTraitItem, |x| x);
         let attrs = self.parse_outer_attributes()?;
@@ -1504,7 +1608,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse optional return type [ -> TY ] in function decl
+    /// Parses an optional return type `[ -> TY ]` in a function declaration.
     fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
         if self.eat(&token::RArrow) {
             Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?))
@@ -1513,12 +1617,13 @@ impl<'a> Parser<'a> {
         }
     }
 
-    // Parse a type
+    /// Parses a type.
     pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
         self.parse_ty_common(true, true)
     }
 
-    /// Parse a type in restricted contexts where `+` is not permitted.
+    /// Parses a type in restricted contexts where `+` is not permitted.
+    ///
     /// Example 1: `&'a TYPE`
     ///     `+` is prohibited to maintain operator priority (P(+) < P(&)).
     /// Example 2: `value1 as TYPE + value2`
@@ -1721,7 +1826,7 @@ impl<'a> Parser<'a> {
         match ty.node {
             TyKind::Rptr(ref lifetime, ref mut_ty) => {
                 let sum_with_parens = pprust::to_string(|s| {
-                    use print::pprust::PrintState;
+                    use crate::print::pprust::PrintState;
 
                     s.s.word("&")?;
                     s.print_opt_lifetime(lifetime)?;
@@ -1821,7 +1926,8 @@ impl<'a> Parser<'a> {
         self.look_ahead(offset + 1, |t| t == &token::Colon)
     }
 
-    /// Skip unexpected attributes and doc comments in this position and emit an appropriate error.
+    /// Skips unexpected attributes and doc comments in this position and emits an appropriate
+    /// error.
     fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
         if let token::DocComment(_) = self.token {
             let mut err = self.diagnostic().struct_span_err(
@@ -1850,8 +1956,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// This version of parse arg doesn't necessarily require
-    /// identifier names.
+    /// This version of parse arg doesn't necessarily require identifier names.
     fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> {
         maybe_whole!(self, NtArg, |x| x);
 
@@ -1959,12 +2064,12 @@ impl<'a> Parser<'a> {
         Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID })
     }
 
-    /// Parse a single function argument
+    /// Parses a single function argument.
     crate fn parse_arg(&mut self) -> PResult<'a, Arg> {
         self.parse_arg_general(true, false)
     }
 
-    /// Parse an argument in a lambda header e.g., |arg, arg|
+    /// Parses an argument in a lambda header (e.g., `|arg, arg|`).
     fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
         let pat = self.parse_pat(Some("argument name"))?;
         let t = if self.eat(&token::Colon) {
@@ -1991,7 +2096,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Matches token_lit = LIT_INTEGER | ...
+    /// Matches `token_lit = LIT_INTEGER | ...`.
     fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
         let out = match self.token {
             token::Interpolated(ref nt) => match nt.0 {
@@ -2057,7 +2162,7 @@ impl<'a> Parser<'a> {
         Ok(out)
     }
 
-    /// Matches lit = true | false | token_lit
+    /// Matches `lit = true | false | token_lit`.
     crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
         let lo = self.span;
         let lit = if self.eat_keyword(keywords::True) {
@@ -2071,7 +2176,7 @@ impl<'a> Parser<'a> {
         Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) })
     }
 
-    /// matches '-' lit | lit (cf. ast_validation::AstValidator::check_expr_within_pat)
+    /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
     crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
 
@@ -2113,7 +2218,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parses qualified path.
+    /// Parses a qualified path.
     /// Assumes that the leading `<` has been parsed already.
     ///
     /// `qualified_path = <type [as trait_ref]>::path`
@@ -2189,8 +2294,9 @@ impl<'a> Parser<'a> {
         Ok(ast::Path { segments, span: lo.to(self.prev_span) })
     }
 
-    /// Like `parse_path`, but also supports parsing `Word` meta items into paths for back-compat.
-    /// This is used when parsing derive macro paths in `#[derive]` attributes.
+    /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
+    /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
+    /// attributes.
     pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
         let meta_ident = match self.token {
             token::Interpolated(ref nt) => match nt.0 {
@@ -2271,7 +2377,10 @@ impl<'a> Parser<'a> {
             // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
             // it isn't, then we reset the unmatched angle bracket count as we're about to start
             // parsing a new path.
-            if style == PathStyle::Expr { self.unmatched_angle_bracket_count = 0; }
+            if style == PathStyle::Expr {
+                self.unmatched_angle_bracket_count = 0;
+                self.max_angle_bracket_count = 0;
+            }
 
             let args = if self.eat_lt() {
                 // `<'a, T, A = U>`
@@ -2283,12 +2392,14 @@ impl<'a> Parser<'a> {
             } else {
                 // `(T, U) -> R`
                 self.bump(); // `(`
-                let inputs = self.parse_seq_to_before_tokens(
+                let (inputs, recovered) = self.parse_seq_to_before_tokens(
                     &[&token::CloseDelim(token::Paren)],
                     SeqSep::trailing_allowed(token::Comma),
                     TokenExpectType::Expect,
                     |p| p.parse_ty())?;
-                self.bump(); // `)`
+                if !recovered {
+                    self.bump(); // `)`
+                }
                 let span = lo.to(self.prev_span);
                 let output = if self.eat(&token::RArrow) {
                     Some(self.parse_ty_common(false, false)?)
@@ -2310,7 +2421,7 @@ impl<'a> Parser<'a> {
         self.token.is_lifetime()
     }
 
-    /// Parse single lifetime 'a or panic.
+    /// Parses a single lifetime `'a` or panics.
     crate fn expect_lifetime(&mut self) -> Lifetime {
         if let Some(ident) = self.token.lifetime() {
             let span = self.span;
@@ -2331,7 +2442,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse mutability (`mut` or nothing).
+    /// Parses mutability (`mut` or nothing).
     fn parse_mutability(&mut self) -> Mutability {
         if self.eat_keyword(keywords::Mut) {
             Mutability::Mutable
@@ -2462,12 +2573,10 @@ impl<'a> Parser<'a> {
     }
 
     /// At the bottom (top?) of the precedence hierarchy,
-    /// parse things like parenthesized exprs,
-    /// macros, return, etc.
+    /// parses things like parenthesized exprs, macros, `return`, etc.
     ///
-    /// N.B., this does not parse outer attributes,
-    ///     and is private because it only works
-    ///     correctly if called from parse_dot_or_call_expr().
+    /// N.B., this does not parse outer attributes, and is private because it only works
+    /// correctly if called from `parse_dot_or_call_expr()`.
     fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
 
@@ -2494,9 +2603,13 @@ impl<'a> Parser<'a> {
                 // (e,) is a tuple with only one field, e
                 let mut es = vec![];
                 let mut trailing_comma = false;
+                let mut recovered = false;
                 while self.token != token::CloseDelim(token::Paren) {
                     es.push(self.parse_expr()?);
-                    self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
+                    recovered = self.expect_one_of(
+                        &[],
+                        &[token::Comma, token::CloseDelim(token::Paren)],
+                    )?;
                     if self.eat(&token::Comma) {
                         trailing_comma = true;
                     } else {
@@ -2504,7 +2617,9 @@ impl<'a> Parser<'a> {
                         break;
                     }
                 }
-                self.bump();
+                if !recovered {
+                    self.bump();
+                }
 
                 hi = self.prev_span;
                 ex = if es.len() == 1 && !trailing_comma {
@@ -2701,6 +2816,21 @@ impl<'a> Parser<'a> {
                     hi = pth.span;
                     ex = ExprKind::Path(None, pth);
                 } else {
+                    if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
+                        // Don't complain about bare semicolons after unclosed braces
+                        // recovery in order to keep the error count down. Fixing the
+                        // delimiters will possibly also fix the bare semicolon found in
+                        // expression context. For example, silence the following error:
+                        // ```
+                        // error: expected expression, found `;`
+                        //  --> file.rs:2:13
+                        //   |
+                        // 2 |     foo(bar(;
+                        //   |             ^ expected expression
+                        // ```
+                        self.bump();
+                        return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new()));
+                    }
                     match self.parse_literal_maybe_minus() {
                         Ok(expr) => {
                             hi = expr.span;
@@ -2800,7 +2930,7 @@ impl<'a> Parser<'a> {
 
             match self.expect_one_of(&[token::Comma],
                                      &[token::CloseDelim(token::Brace)]) {
-                Ok(()) => if let Some(f) = parsed_field.or(recovery_field) {
+                Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
                     // only include the field if there's no parse error for the field name
                     fields.push(f);
                 }
@@ -2831,7 +2961,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse a block or unsafe block
+    /// Parses a block or unsafe block.
     fn parse_block_expr(&mut self, opt_label: Option<Label>,
                             lo: Span, blk_mode: BlockCheckMode,
                             outer_attrs: ThinVec<Attribute>)
@@ -2845,7 +2975,7 @@ impl<'a> Parser<'a> {
         return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
     }
 
-    /// parse a.b or a(13) or a[4] or just a
+    /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
     fn parse_dot_or_call_expr(&mut self,
                                   already_parsed_attrs: Option<ThinVec<Attribute>>)
                                   -> PResult<'a, P<Expr>> {
@@ -3063,7 +3193,7 @@ impl<'a> Parser<'a> {
                             None => continue,
                         };
                         let sugg = pprust::to_string(|s| {
-                            use print::pprust::PrintState;
+                            use crate::print::pprust::PrintState;
                             s.popen()?;
                             s.print_expr(&e)?;
                             s.s.word( ".")?;
@@ -3153,7 +3283,7 @@ impl<'a> Parser<'a> {
         self.span = span;
     }
 
-    /// parse a single token tree from the input.
+    /// Parses a single token tree from the input.
     crate fn parse_token_tree(&mut self) -> TokenTree {
         match self.token {
             token::OpenDelim(..) => {
@@ -3313,7 +3443,7 @@ impl<'a> Parser<'a> {
         return Ok(self.mk_expr(lo.to(hi), ex, attrs));
     }
 
-    /// Parse an associative expression
+    /// Parses an associative expression.
     ///
     /// This parses an expression accounting for associativity and precedence of the operators in
     /// the expression.
@@ -3324,7 +3454,7 @@ impl<'a> Parser<'a> {
         self.parse_assoc_expr_with(0, already_parsed_attrs.into())
     }
 
-    /// Parse an associative expression with operators of at least `min_prec` precedence
+    /// Parses an associative expression with operators of at least `min_prec` precedence.
     fn parse_assoc_expr_with(&mut self,
                                  min_prec: usize,
                                  lhs: LhsExpr)
@@ -3455,6 +3585,14 @@ impl<'a> Parser<'a> {
                 }),
             }?;
 
+            // Make sure that the span of the parent node is larger than the span of lhs and rhs,
+            // including the attributes.
+            let lhs_span = lhs
+                .attrs
+                .iter()
+                .filter(|a| a.style == AttrStyle::Outer)
+                .next()
+                .map_or(lhs_span, |a| a.span);
             let span = lhs_span.to(rhs.span);
             lhs = match op {
                 AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide |
@@ -3651,7 +3789,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse an 'if' or 'if let' expression ('if' token already eaten)
+    /// Parses an `if` or `if let` expression (`if` token already eaten).
     fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         if self.check_keyword(keywords::Let) {
             return self.parse_if_let_expr(attrs);
@@ -3687,7 +3825,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
     }
 
-    /// Parse an 'if let' expression ('if' token already eaten)
+    /// Parses an `if let` expression (`if` token already eaten).
     fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
                              -> PResult<'a, P<Expr>> {
         let lo = self.prev_span;
@@ -3705,7 +3843,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, expr, thn, els), attrs))
     }
 
-    // `move |args| expr`
+    /// Parses `move |args| expr`.
     fn parse_lambda_expr(&mut self,
                              attrs: ThinVec<Attribute>)
                              -> PResult<'a, P<Expr>>
@@ -3801,7 +3939,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
     }
 
-    /// Parse a 'while' or 'while let' expression ('while' token already eaten)
+    /// Parses a `while` or `while let` expression (`while` token already eaten).
     fn parse_while_expr(&mut self, opt_label: Option<Label>,
                             span_lo: Span,
                             mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
@@ -3815,7 +3953,7 @@ impl<'a> Parser<'a> {
         return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
     }
 
-    /// Parse a 'while let' expression ('while' token already eaten)
+    /// Parses a `while let` expression (`while` token already eaten).
     fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
                                 span_lo: Span,
                                 mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
@@ -3839,7 +3977,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
     }
 
-    /// Parse an `async move {...}` expression
+    /// Parses an `async move {...}` expression.
     pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>)
         -> PResult<'a, P<Expr>>
     {
@@ -3857,7 +3995,7 @@ impl<'a> Parser<'a> {
             ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
     }
 
-    /// Parse a `try {...}` expression (`try` token already eaten)
+    /// Parses a `try {...}` expression (`try` token already eaten).
     fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
         -> PResult<'a, P<Expr>>
     {
@@ -3975,15 +4113,15 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse an expression
+    /// Parses an expression.
     #[inline]
     pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
         self.parse_expr_res(Restrictions::empty(), None)
     }
 
-    /// Evaluate the closure with restrictions in place.
+    /// Evaluates the closure with restrictions in place.
     ///
-    /// After the closure is evaluated, restrictions are reset.
+    /// After the closure is evaluated, restrictions are reset.
     fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
         where F: FnOnce(&mut Self) -> T
     {
@@ -3995,7 +4133,7 @@ impl<'a> Parser<'a> {
 
     }
 
-    /// Parse an expression, subject to the given restrictions
+    /// Parses an expression, subject to the given restrictions.
     #[inline]
     fn parse_expr_res(&mut self, r: Restrictions,
                           already_parsed_attrs: Option<ThinVec<Attribute>>)
@@ -4003,7 +4141,7 @@ impl<'a> Parser<'a> {
         self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
     }
 
-    /// Parse the RHS of a local variable declaration (e.g., '= 14;')
+    /// Parses the RHS of a local variable declaration (e.g., '= 14;').
     fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
         if self.eat(&token::Eq) {
             Ok(Some(self.parse_expr()?))
@@ -4014,7 +4152,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse patterns, separated by '|' s
+    /// Parses patterns, separated by '|' s.
     fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
         // Allow a '|' before the pats (RFC 1925 + RFC 2530)
         self.eat(&token::BinOp(token::Or));
@@ -4204,7 +4342,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse the fields of a struct-like pattern
+    /// Parses the fields of a struct-like pattern.
     fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> {
         let mut fields = Vec::new();
         let mut etc = false;
@@ -4396,13 +4534,13 @@ impl<'a> Parser<'a> {
         Ok(pat)
     }
 
-    /// Parse a pattern.
+    /// Parses a pattern.
     pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
         self.parse_pat_with_range_pat(true, expected)
     }
 
-    /// Parse a pattern, with a setting whether modern range patterns e.g., `a..=b`, `a..b` are
-    /// allowed.
+    /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b`) are
+    /// allowed.
     fn parse_pat_with_range_pat(
         &mut self,
         allow_range_pat: bool,
@@ -4612,9 +4750,9 @@ impl<'a> Parser<'a> {
         Ok(P(pat))
     }
 
-    /// Parse ident or ident @ pat
+    /// Parses `ident` or `ident @ pat`.
     /// used by the copy foo and ref foo patterns to give a good
-    /// error message when parsing mistakes like ref foo(a,b)
+    /// error message when parsing mistakes like `ref foo(a, b)`.
     fn parse_pat_ident(&mut self,
                        binding_mode: ast::BindingMode)
                        -> PResult<'a, PatKind> {
@@ -4640,7 +4778,7 @@ impl<'a> Parser<'a> {
         Ok(PatKind::Ident(binding_mode, ident, sub))
     }
 
-    /// Parse a local variable declaration
+    /// Parses a local variable declaration.
     fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
         let lo = self.prev_span;
         let pat = self.parse_top_level_pat()?;
@@ -4713,7 +4851,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Parse a structure field
+    /// Parses a structure field.
     fn parse_name_and_ty(&mut self,
                          lo: Span,
                          vis: Visibility,
@@ -4732,7 +4870,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Emit an expected item after attributes error.
+    /// Emits an expected-item-after-attributes error.
     fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a,  ()> {
         let message = match attrs.last() {
             Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
@@ -5155,13 +5293,13 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Is this expression a successfully-parsed statement?
+    /// Checks if this expression is a successfully parsed statement.
     fn expr_is_complete(&mut self, e: &Expr) -> bool {
         self.restrictions.contains(Restrictions::STMT_EXPR) &&
             !classify::expr_requires_semi_to_be_stmt(e)
     }
 
-    /// Parse a block. No inner attrs are allowed.
+    /// Parses a block. No inner attributes are allowed.
     pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
         maybe_whole!(self, NtBlock, |x| x);
 
@@ -5212,7 +5350,7 @@ impl<'a> Parser<'a> {
                         stmt_span = stmt_span.with_hi(self.prev_span.hi());
                     }
                     let sugg = pprust::to_string(|s| {
-                        use print::pprust::{PrintState, INDENT_UNIT};
+                        use crate::print::pprust::{PrintState, INDENT_UNIT};
                         s.ibox(INDENT_UNIT)?;
                         s.bopen()?;
                         s.print_stmt(&stmt)?;
@@ -5239,7 +5377,7 @@ impl<'a> Parser<'a> {
         self.parse_block_tail(lo, BlockCheckMode::Default)
     }
 
-    /// Parse a block. Inner attrs are allowed.
+    /// Parses a block. Inner attributes are allowed.
     fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
         maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
 
@@ -5249,7 +5387,7 @@ impl<'a> Parser<'a> {
             self.parse_block_tail(lo, BlockCheckMode::Default)?))
     }
 
-    /// Parse the rest of a block expression or function body
+    /// Parses the rest of a block expression or function body.
     /// Precondition: already parsed the '{'.
     fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
         let mut stmts = vec![];
@@ -5283,7 +5421,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Parse a statement, including the trailing semicolon.
+    /// Parses a statement, including the trailing semicolon.
     crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
         // skip looking for a trailing semicolon when we have an interpolated statement
         maybe_whole!(self, NtStmt, |x| Some(x));
@@ -5345,11 +5483,14 @@ impl<'a> Parser<'a> {
         ).emit();
     }
 
-    // Parse bounds of a type parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
-    // BOUND = TY_BOUND | LT_BOUND
-    // LT_BOUND = LIFETIME (e.g., `'a`)
-    // TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
-    // TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
+    /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
+    ///
+    /// ```
+    /// BOUND = TY_BOUND | LT_BOUND
+    /// LT_BOUND = LIFETIME (e.g., `'a`)
+    /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
+    /// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
+    /// ```
     fn parse_generic_bounds_common(&mut self, allow_plus: bool) -> PResult<'a, GenericBounds> {
         let mut bounds = Vec::new();
         loop {
@@ -5403,8 +5544,11 @@ impl<'a> Parser<'a> {
         self.parse_generic_bounds_common(true)
     }
 
-    // Parse bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
-    // BOUND = LT_BOUND (e.g., `'a`)
+    /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
+    ///
+    /// ```
+    /// BOUND = LT_BOUND (e.g., `'a`)
+    /// ```
     fn parse_lt_param_bounds(&mut self) -> GenericBounds {
         let mut lifetimes = Vec::new();
         while self.check_lifetime() {
@@ -5417,7 +5561,7 @@ impl<'a> Parser<'a> {
         lifetimes
     }
 
-    /// Matches typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?
+    /// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
     fn parse_ty_param(&mut self,
                       preceding_attrs: Vec<Attribute>)
                       -> PResult<'a, GenericParam> {
@@ -5448,6 +5592,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the following grammar:
+    ///
     ///     TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
     fn parse_trait_item_assoc_ty(&mut self)
         -> PResult<'a, (Ident, TraitItemKind, ast::Generics)> {
@@ -5472,15 +5617,27 @@ impl<'a> Parser<'a> {
         Ok((ident, TraitItemKind::Type(bounds, default), generics))
     }
 
-    /// Parses (possibly empty) list of lifetime and type parameters, possibly including
-    /// trailing comma and erroneous trailing attributes.
+    fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
+        self.expect_keyword(keywords::Const)?;
+        let ident = self.parse_ident()?;
+        self.expect(&token::Colon)?;
+        let ty = self.parse_ty()?;
+
+        Ok(GenericParam {
+            ident,
+            id: ast::DUMMY_NODE_ID,
+            attrs: preceding_attrs.into(),
+            bounds: Vec::new(),
+            kind: GenericParamKind::Const {
+                ty,
+            }
+        })
+    }
+
+    /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
+    /// a trailing comma and erroneous trailing attributes.
     crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
-        let mut lifetimes = Vec::new();
         let mut params = Vec::new();
-        let mut seen_ty_param: Option<Span> = None;
-        let mut last_comma_span = None;
-        let mut bad_lifetime_pos = vec![];
-        let mut suggestions = vec![];
         loop {
             let attrs = self.parse_outer_attributes()?;
             if self.check_lifetime() {
@@ -5491,39 +5648,40 @@ impl<'a> Parser<'a> {
                 } else {
                     Vec::new()
                 };
-                lifetimes.push(ast::GenericParam {
+                params.push(ast::GenericParam {
                     ident: lifetime.ident,
                     id: lifetime.id,
                     attrs: attrs.into(),
                     bounds,
                     kind: ast::GenericParamKind::Lifetime,
                 });
-                if let Some(sp) = seen_ty_param {
-                    let remove_sp = last_comma_span.unwrap_or(self.prev_span).to(self.prev_span);
-                    bad_lifetime_pos.push(self.prev_span);
-                    if let Ok(snippet) = self.sess.source_map().span_to_snippet(self.prev_span) {
-                        suggestions.push((remove_sp, String::new()));
-                        suggestions.push((
-                            sp.shrink_to_lo(),
-                            format!("{}, ", snippet)));
-                    }
-                }
+            } else if self.check_keyword(keywords::Const) {
+                // Parse const parameter.
+                params.push(self.parse_const_param(attrs)?);
             } else if self.check_ident() {
                 // Parse type parameter.
                 params.push(self.parse_ty_param(attrs)?);
-                if seen_ty_param.is_none() {
-                    seen_ty_param = Some(self.prev_span);
-                }
             } else {
                 // Check for trailing attributes and stop parsing.
                 if !attrs.is_empty() {
-                    let param_kind = if seen_ty_param.is_some() { "type" } else { "lifetime" };
-                    self.struct_span_err(
-                        attrs[0].span,
-                        &format!("trailing attribute after {} parameters", param_kind),
-                    )
-                    .span_label(attrs[0].span, "attributes must go before parameters")
-                    .emit();
+                    if !params.is_empty() {
+                        self.struct_span_err(
+                            attrs[0].span,
+                            &format!("trailing attribute after generic parameter"),
+                        )
+                        .span_label(attrs[0].span, "attributes must go before parameters")
+                        .emit();
+                    } else {
+                        self.struct_span_err(
+                            attrs[0].span,
+                            &format!("attribute without generic parameters"),
+                        )
+                        .span_label(
+                            attrs[0].span,
+                            "attributes are only permitted when preceding parameters",
+                        )
+                        .emit();
+                    }
                 }
                 break
             }
@@ -5531,27 +5689,11 @@ impl<'a> Parser<'a> {
             if !self.eat(&token::Comma) {
                 break
             }
-            last_comma_span = Some(self.prev_span);
-        }
-        if !bad_lifetime_pos.is_empty() {
-            let mut err = self.struct_span_err(
-                bad_lifetime_pos,
-                "lifetime parameters must be declared prior to type parameters",
-            );
-            if !suggestions.is_empty() {
-                err.multipart_suggestion(
-                    "move the lifetime parameter prior to the first type parameter",
-                    suggestions,
-                    Applicability::MachineApplicable,
-                );
-            }
-            err.emit();
         }
-        lifetimes.extend(params);  // ensure the correct order of lifetimes and type params
-        Ok(lifetimes)
+        Ok(params)
     }
 
-    /// Parse a set of optional generic type parameter declarations. Where
+    /// Parses a set of optional generic type parameter declarations. Where
     /// clauses are not parsed here, and must be added later via
     /// `parse_where_clause()`.
     ///
@@ -5579,7 +5721,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse generic args (within a path segment) with recovery for extra leading angle brackets.
+    /// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
     /// For the purposes of understanding the parsing logic of generic arguments, this function
     /// can be thought of being the same as just calling `self.parse_generic_args()` if the source
     /// had the correct amount of leading angle brackets.
@@ -5730,35 +5872,16 @@ impl<'a> Parser<'a> {
     fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<TypeBinding>)> {
         let mut args = Vec::new();
         let mut bindings = Vec::new();
+        let mut misplaced_assoc_ty_bindings: Vec<Span> = Vec::new();
+        let mut assoc_ty_bindings: Vec<Span> = Vec::new();
 
-        let mut seen_type = false;
-        let mut seen_binding = false;
+        let args_lo = self.span;
 
-        let mut last_comma_span = None;
-        let mut first_type_or_binding_span: Option<Span> = None;
-        let mut first_binding_span: Option<Span> = None;
-
-        let mut bad_lifetime_pos = vec![];
-        let mut bad_type_pos = vec![];
-
-        let mut lifetime_suggestions = vec![];
-        let mut type_suggestions = vec![];
         loop {
             if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
                 // Parse lifetime argument.
                 args.push(GenericArg::Lifetime(self.expect_lifetime()));
-
-                if seen_type || seen_binding {
-                    let remove_sp = last_comma_span.unwrap_or(self.prev_span).to(self.prev_span);
-                    bad_lifetime_pos.push(self.prev_span);
-
-                    if let Ok(snippet) = self.sess.source_map().span_to_snippet(self.prev_span) {
-                        lifetime_suggestions.push((remove_sp, String::new()));
-                        lifetime_suggestions.push((
-                            first_type_or_binding_span.unwrap().shrink_to_lo(),
-                            format!("{}, ", snippet)));
-                    }
-                }
+                misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
             } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) {
                 // Parse associated type binding.
                 let lo = self.span;
@@ -5772,134 +5895,67 @@ impl<'a> Parser<'a> {
                     ty,
                     span,
                 });
-
-                seen_binding = true;
-                if first_type_or_binding_span.is_none() {
-                    first_type_or_binding_span = Some(span);
-                }
-                if first_binding_span.is_none() {
-                    first_binding_span = Some(span);
-                }
+                assoc_ty_bindings.push(span);
+            } else if self.check_const_arg() {
+                // FIXME(const_generics): to distinguish between idents for types and consts,
+                // we should introduce a GenericArg::Ident in the AST and distinguish when
+                // lowering to the HIR. For now, idents for const args are not permitted.
+
+                // Parse const argument.
+                let expr = if let token::OpenDelim(token::Brace) = self.token {
+                    self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())?
+                } else if self.token.is_ident() {
+                    // FIXME(const_generics): to distinguish between idents for types and consts,
+                    // we should introduce a GenericArg::Ident in the AST and distinguish when
+                    // lowering to the HIR. For now, idents for const args are not permitted.
+                    return Err(
+                        self.fatal("identifiers may currently not be used for const generics")
+                    );
+                } else {
+                    // FIXME(const_generics): this currently conflicts with emplacement syntax
+                    // with negative integer literals.
+                    self.parse_literal_maybe_minus()?
+                };
+                let value = AnonConst {
+                    id: ast::DUMMY_NODE_ID,
+                    value: expr,
+                };
+                args.push(GenericArg::Const(value));
+                misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
             } else if self.check_type() {
                 // Parse type argument.
-                let ty_param = self.parse_ty()?;
-                if seen_binding {
-                    let remove_sp = last_comma_span.unwrap_or(self.prev_span).to(self.prev_span);
-                    bad_type_pos.push(self.prev_span);
-
-                    if let Ok(snippet) = self.sess.source_map().span_to_snippet(self.prev_span) {
-                        type_suggestions.push((remove_sp, String::new()));
-                        type_suggestions.push((
-                            first_binding_span.unwrap().shrink_to_lo(),
-                            format!("{}, ", snippet)));
-                    }
-                }
-
-                if first_type_or_binding_span.is_none() {
-                    first_type_or_binding_span = Some(ty_param.span);
-                }
-                args.push(GenericArg::Type(ty_param));
-                seen_type = true;
+                args.push(GenericArg::Type(self.parse_ty()?));
+                misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
             } else {
                 break
             }
 
             if !self.eat(&token::Comma) {
                 break
-            } else {
-                last_comma_span = Some(self.prev_span);
-            }
-        }
-
-        self.maybe_report_incorrect_generic_argument_order(
-            bad_lifetime_pos, bad_type_pos, lifetime_suggestions, type_suggestions
-        );
-
-        Ok((args, bindings))
-    }
-
-    /// Maybe report an error about incorrect generic argument order - "lifetime parameters
-    /// must be declared before type parameters", "type parameters must be declared before
-    /// associated type bindings" or both.
-    fn maybe_report_incorrect_generic_argument_order(
-        &self,
-        bad_lifetime_pos: Vec<Span>,
-        bad_type_pos: Vec<Span>,
-        lifetime_suggestions: Vec<(Span, String)>,
-        type_suggestions: Vec<(Span, String)>,
-    ) {
-        let mut err = if !bad_lifetime_pos.is_empty() && !bad_type_pos.is_empty() {
-            let mut positions = bad_lifetime_pos.clone();
-            positions.extend_from_slice(&bad_type_pos);
-
-            self.struct_span_err(
-                positions,
-                "generic arguments must declare lifetimes, types and associated type bindings in \
-                 that order",
-            )
-        } else if !bad_lifetime_pos.is_empty() {
-            self.struct_span_err(
-                bad_lifetime_pos.clone(),
-                "lifetime parameters must be declared prior to type parameters"
-            )
-        } else if !bad_type_pos.is_empty() {
-            self.struct_span_err(
-                bad_type_pos.clone(),
-                "type parameters must be declared prior to associated type bindings"
-            )
-        } else {
-            return;
-        };
-
-        if !bad_lifetime_pos.is_empty() {
-            for sp in &bad_lifetime_pos {
-                err.span_label(*sp, "must be declared prior to type parameters");
-            }
-        }
-
-        if !bad_type_pos.is_empty() {
-            for sp in &bad_type_pos {
-                err.span_label(*sp, "must be declared prior to associated type bindings");
             }
         }
 
-        if !lifetime_suggestions.is_empty() && !type_suggestions.is_empty() {
-            let mut suggestions = lifetime_suggestions;
-            suggestions.extend_from_slice(&type_suggestions);
-
-            let plural = bad_lifetime_pos.len() + bad_type_pos.len() > 1;
-            err.multipart_suggestion(
-                &format!(
-                    "move the parameter{}",
-                    if plural { "s" } else { "" },
-                ),
-                suggestions,
-                Applicability::MachineApplicable,
-            );
-        } else if !lifetime_suggestions.is_empty() {
-            err.multipart_suggestion(
-                &format!(
-                    "move the lifetime parameter{} prior to the first type parameter",
-                    if bad_lifetime_pos.len() > 1 { "s" } else { "" },
-                ),
-                lifetime_suggestions,
-                Applicability::MachineApplicable,
-            );
-        } else if !type_suggestions.is_empty() {
-            err.multipart_suggestion(
-                &format!(
-                    "move the type parameter{} prior to the first associated type binding",
-                    if bad_type_pos.len() > 1 { "s" } else { "" },
-                ),
-                type_suggestions,
-                Applicability::MachineApplicable,
+        // FIXME: we would like to report this in ast_validation instead, but we currently do not
+        // preserve ordering of generic parameters with respect to associated type binding, so we
+        // lose that information after parsing.
+        if misplaced_assoc_ty_bindings.len() > 0 {
+            let mut err = self.struct_span_err(
+                args_lo.to(self.prev_span),
+                "associated type bindings must be declared after generic parameters",
             );
+            for span in misplaced_assoc_ty_bindings {
+                err.span_label(
+                    span,
+                    "this associated type binding should be moved after the generic parameters",
+                );
+            }
+            err.emit();
         }
 
-        err.emit();
+        Ok((args, bindings))
     }
 
-    /// Parses an optional `where` clause and places it in `generics`.
+    /// Parses an optional where-clause and places it in `generics`.
     ///
     /// ```ignore (only-for-syntax-highlight)
     /// where T : Trait<U, V> + 'b, 'a : 'b
@@ -6001,7 +6057,7 @@ impl<'a> Parser<'a> {
 
         let sp = self.span;
         let mut variadic = false;
-        let args: Vec<Option<Arg>> =
+        let (args, recovered): (Vec<Option<Arg>>, bool) =
             self.parse_seq_to_before_end(
                 &token::CloseDelim(token::Paren),
                 SeqSep::trailing_allowed(token::Comma),
@@ -6049,7 +6105,9 @@ impl<'a> Parser<'a> {
                 }
             )?;
 
-        self.eat(&token::CloseDelim(token::Paren));
+        if !recovered {
+            self.eat(&token::CloseDelim(token::Paren));
+        }
 
         let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
 
@@ -6061,7 +6119,7 @@ impl<'a> Parser<'a> {
         Ok((args, variadic))
     }
 
-    /// Parse the argument list and result type of a function declaration
+    /// Parses the argument list and result type of a function declaration.
     fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>> {
 
         let (args, variadic) = self.parse_fn_args(true, allow_variadic)?;
@@ -6183,7 +6241,7 @@ impl<'a> Parser<'a> {
         Ok(Some(Arg::from_self(eself, eself_ident)))
     }
 
-    /// Parse the parameter list and result type of a function that may have a `self` parameter.
+    /// Parses the parameter list and result type of a function that may have a `self` parameter.
     fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>>
         where F: FnMut(&mut Parser<'a>) -> PResult<'a,  Arg>,
     {
@@ -6194,15 +6252,15 @@ impl<'a> Parser<'a> {
 
         // Parse the rest of the function parameter list.
         let sep = SeqSep::trailing_allowed(token::Comma);
-        let fn_inputs = if let Some(self_arg) = self_arg {
+        let (fn_inputs, recovered) = if let Some(self_arg) = self_arg {
             if self.check(&token::CloseDelim(token::Paren)) {
-                vec![self_arg]
+                (vec![self_arg], false)
             } else if self.eat(&token::Comma) {
                 let mut fn_inputs = vec![self_arg];
-                fn_inputs.append(&mut self.parse_seq_to_before_end(
-                    &token::CloseDelim(token::Paren), sep, parse_arg_fn)?
-                );
-                fn_inputs
+                let (mut input, recovered) = self.parse_seq_to_before_end(
+                    &token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
+                fn_inputs.append(&mut input);
+                (fn_inputs, recovered)
             } else {
                 return self.unexpected();
             }
@@ -6210,8 +6268,10 @@ impl<'a> Parser<'a> {
             self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
         };
 
-        // Parse closing paren and return type.
-        self.expect(&token::CloseDelim(token::Paren))?;
+        if !recovered {
+            // Parse closing paren and return type.
+            self.expect(&token::CloseDelim(token::Paren))?;
+        }
         Ok(P(FnDecl {
             inputs: fn_inputs,
             output: self.parse_ret_ty(true)?,
@@ -6219,7 +6279,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    // parse the |arg, arg| header on a lambda
+    /// Parses the `|arg, arg|` header of a closure.
     fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
         let inputs_captures = {
             if self.eat(&token::OrOr) {
@@ -6231,7 +6291,7 @@ impl<'a> Parser<'a> {
                     SeqSep::trailing_allowed(token::Comma),
                     TokenExpectType::NoExpect,
                     |p| p.parse_fn_block_arg()
-                )?;
+                )?.0;
                 self.expect_or()?;
                 args
             }
@@ -6245,7 +6305,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Parse the name and optional generic types of a function header.
+    /// Parses the name and optional generic types of a function header.
     fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> {
         let id = self.parse_ident()?;
         let generics = self.parse_generics()?;
@@ -6265,7 +6325,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse an item-position function declaration.
+    /// Parses an item-position function declaration.
     fn parse_item_fn(&mut self,
                      unsafety: Unsafety,
                      asyncness: IsAsync,
@@ -6280,21 +6340,22 @@ impl<'a> Parser<'a> {
         Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs)))
     }
 
-    /// true if we are looking at `const ID`, false for things like `const fn` etc
+    /// Returns `true` if we are looking at `const ID`
+    /// (returns `false` for things like `const fn`, etc.).
     fn is_const_item(&mut self) -> bool {
         self.token.is_keyword(keywords::Const) &&
             !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
             !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
     }
 
-    /// parses all the "front matter" for a `fn` declaration, up to
+    /// Parses all the "front matter" for a `fn` declaration, up to
     /// and including the `fn` keyword:
     ///
     /// - `const fn`
     /// - `unsafe fn`
     /// - `const unsafe fn`
     /// - `extern fn`
-    /// - etc
+    /// - etc.
     fn parse_fn_front_matter(&mut self)
         -> PResult<'a, (
             Spanned<Constness>,
@@ -6321,7 +6382,7 @@ impl<'a> Parser<'a> {
         Ok((constness, unsafety, asyncness, abi))
     }
 
-    /// Parse an impl item.
+    /// Parses an impl item.
     pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
         maybe_whole!(self, NtImplItem, |x| x);
         let attrs = self.parse_outer_attributes()?;
@@ -6460,7 +6521,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse `trait Foo { ... }` or `trait Foo = Bar;`
+    /// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
     fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
         let ident = self.parse_ident()?;
         let mut tps = self.parse_generics()?;
@@ -6516,6 +6577,7 @@ impl<'a> Parser<'a> {
         //     `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
         //     `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
         //     `<` (LIFETIME|IDENT) `=` - generic parameter with a default
+        //     `<` const                - generic const parameter
         // The only truly ambiguous case is
         //     `<` IDENT `>` `::` IDENT ...
         // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
@@ -6525,7 +6587,8 @@ impl<'a> Parser<'a> {
             (self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) ||
              self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
                 self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
-                                       t == &token::Colon || t == &token::Eq))
+                                       t == &token::Colon || t == &token::Eq) ||
+             self.look_ahead(1, |t| t.is_keyword(keywords::Const)))
     }
 
     fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
@@ -6549,9 +6612,11 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses an implementation item, `impl` keyword is already parsed.
+    ///
     ///    impl<'a, T> TYPE { /* impl items */ }
     ///    impl<'a, T> TRAIT for TYPE { /* impl items */ }
     ///    impl<'a, T> !TRAIT for TYPE { /* impl items */ }
+    ///
     /// We actually parse slightly more relaxed grammar for better error reporting and recovery.
     ///     `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}`
     ///     `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}`
@@ -6643,7 +6708,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse struct Foo { ... }
+    /// Parses `struct Foo { ... }`.
     fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
         let class_name = self.parse_ident()?;
 
@@ -6697,7 +6762,7 @@ impl<'a> Parser<'a> {
         Ok((class_name, ItemKind::Struct(vdata, generics), None))
     }
 
-    /// Parse union Foo { ... }
+    /// Parses `union Foo { ... }`.
     fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
         let class_name = self.parse_ident()?;
 
@@ -6791,7 +6856,7 @@ impl<'a> Parser<'a> {
         Ok(fields)
     }
 
-    /// Parse a structure field declaration
+    /// Parses a structure field declaration.
     fn parse_single_struct_field(&mut self,
                                      lo: Span,
                                      vis: Visibility,
@@ -6853,7 +6918,7 @@ impl<'a> Parser<'a> {
         Ok(a_var)
     }
 
-    /// Parse an element of a struct definition
+    /// Parses an element of a struct declaration.
     fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
         let attrs = self.parse_outer_attributes()?;
         let lo = self.span;
@@ -6861,11 +6926,11 @@ impl<'a> Parser<'a> {
         self.parse_single_struct_field(lo, vis, attrs)
     }
 
-    /// Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
+    /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
     /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
-    /// If the following element can't be a tuple (i.e., it's a function definition,
-    /// it's not a tuple struct field) and the contents within the parens
-    /// isn't valid, emit a proper diagnostic.
+    /// If the following element can't be a tuple (i.e., it's a function definition), then
+    /// it's not a tuple struct field), and the contents within the parentheses isn't valid,
+    /// so emit a proper diagnostic.
     pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
         maybe_whole!(self, NtVis, |x| x);
 
@@ -6946,7 +7011,7 @@ impl<'a> Parser<'a> {
         Ok(respan(lo, VisibilityKind::Public))
     }
 
-    /// Parse defaultness: `default` or nothing.
+    /// Parses defaultness (i.e., `default` or nothing).
     fn parse_defaultness(&mut self) -> Defaultness {
         // `pub` is included for better error messages
         if self.check_keyword(keywords::Default) &&
@@ -6995,7 +7060,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Given a termination token, parse all of the items in a module
+    /// Given a termination token, parses all of the items in a module.
     fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
         let mut items = vec![];
         while let Some(item) = self.parse_item()? {
@@ -7042,11 +7107,12 @@ impl<'a> Parser<'a> {
     /// Parse a `mod <foo> { ... }` or `mod <foo>;` item
     fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
         let (in_cfg, outer_attrs) = {
-            let mut strip_unconfigured = ::config::StripUnconfigured {
+            let mut strip_unconfigured = crate::config::StripUnconfigured {
                 sess: self.sess,
                 features: None, // don't perform gated feature checking
             };
-            let outer_attrs = strip_unconfigured.process_cfg_attrs(outer_attrs.to_owned());
+            let mut outer_attrs = outer_attrs.to_owned();
+            strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
             (!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
         };
 
@@ -7131,7 +7197,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Returns either a path to a module, or .
+    /// Returns a path to a module.
     pub fn default_submod_path(
         id: ast::Ident,
         relative: Option<ast::Ident>,
@@ -7274,7 +7340,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Read a module from a source file.
+    /// Reads a module from a source file.
     fn eval_src_mod(&mut self,
                     path: PathBuf,
                     directory_ownership: DirectoryOwnership,
@@ -7306,7 +7372,7 @@ impl<'a> Parser<'a> {
         Ok((m0, mod_attrs))
     }
 
-    /// Parse a function declaration from a foreign module
+    /// Parses a function declaration from a foreign module.
     fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
                              -> PResult<'a, ForeignItem> {
         self.expect_keyword(keywords::Fn)?;
@@ -7326,7 +7392,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse a static item from a foreign module.
+    /// Parses a static item from a foreign module.
     /// Assumes that the `static` keyword is already parsed.
     fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
                                  -> PResult<'a, ForeignItem> {
@@ -7346,7 +7412,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse a type from a foreign module
+    /// Parses a type from a foreign module.
     fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
                              -> PResult<'a, ForeignItem> {
         self.expect_keyword(keywords::Type)?;
@@ -7405,12 +7471,14 @@ impl<'a> Parser<'a> {
         Ok(ident)
     }
 
-    /// Parse extern crate links
+    /// Parses `extern crate` links.
     ///
     /// # Examples
     ///
+    /// ```
     /// extern crate foo;
     /// extern crate bar as foo;
+    /// ```
     fn parse_item_extern_crate(&mut self,
                                lo: Span,
                                visibility: Visibility,
@@ -7429,16 +7497,17 @@ impl<'a> Parser<'a> {
         Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs))
     }
 
-    /// Parse `extern` for foreign ABIs
-    /// modules.
+    /// Parses `extern` for foreign ABIs modules.
     ///
     /// `extern` is expected to have been
-    /// consumed before calling this method
+    /// consumed before calling this method.
     ///
-    /// # Examples:
+    /// # Examples
     ///
+    /// ```ignore (only-for-syntax-highlight)
     /// extern "C" {}
     /// extern {}
+    /// ```
     fn parse_item_foreign_mod(&mut self,
                               lo: Span,
                               opt_abi: Option<Abi>,
@@ -7465,11 +7534,12 @@ impl<'a> Parser<'a> {
         Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
     }
 
-    /// Parse `type Foo = Bar;`
+    /// Parses `type Foo = Bar;`
     /// or
     /// `existential type Foo: Bar;`
     /// or
-    /// `return None` without modifying the parser state
+    /// `return `None``
+    /// without modifying the parser state.
     fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
         // This parses the grammar:
         //     Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
@@ -7484,7 +7554,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse type alias or existential type
+    /// Parses a type alias or existential type.
     fn parse_existential_or_alias(
         &mut self,
         existential: bool,
@@ -7505,7 +7575,7 @@ impl<'a> Parser<'a> {
         Ok((ident, alias, tps))
     }
 
-    /// Parse the part of an "enum" decl following the '{'
+    /// Parses the part of an enum declaration following the `{`.
     fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
         let mut variants = Vec::new();
         let mut all_nullary = true;
@@ -7564,7 +7634,7 @@ impl<'a> Parser<'a> {
         Ok(ast::EnumDef { variants })
     }
 
-    /// Parse an "enum" declaration
+    /// Parses an enum declaration.
     fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
         let id = self.parse_ident()?;
         let mut generics = self.parse_generics()?;
@@ -7660,7 +7730,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Parse one of the items allowed by the flags.
+    /// Parses one of the items allowed by the flags.
     fn parse_item_implementation(
         &mut self,
         attrs: Vec<Attribute>,
@@ -8085,7 +8155,7 @@ impl<'a> Parser<'a> {
         self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
     }
 
-    /// Parse a foreign item.
+    /// Parses a foreign item.
     crate fn parse_foreign_item(&mut self) -> PResult<'a, ForeignItem> {
         maybe_whole!(self, NtForeignItem, |ni| ni);
 
@@ -8201,7 +8271,7 @@ impl<'a> Parser<'a> {
         Ok(None)
     }
 
-    /// Parse a macro invocation inside a `trait`, `impl` or `extern` block
+    /// Parses a macro invocation inside a `trait`, `impl` or `extern` block.
     fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
                                at_end: &mut bool) -> PResult<'a, Option<Mac>>
     {
@@ -8227,7 +8297,7 @@ impl<'a> Parser<'a> {
             // eat a matched-delimiter token tree:
             let (delim, tts) = self.expect_delimited_token_tree()?;
             if delim != MacDelimiter::Brace {
-                self.expect(&token::Semi)?
+                self.expect(&token::Semi)?;
             }
 
             Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
@@ -8304,13 +8374,15 @@ impl<'a> Parser<'a> {
                                    *t == token::BinOp(token::Star))
     }
 
-    /// Parse UseTree
+    /// Parses a `UseTree`.
     ///
+    /// ```
     /// USE_TREE = [`::`] `*` |
     ///            [`::`] `{` USE_TREE_LIST `}` |
     ///            PATH `::` `*` |
     ///            PATH `::` `{` USE_TREE_LIST `}` |
     ///            PATH [`as` IDENT]
+    /// ```
     fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
         let lo = self.span;
 
@@ -8349,9 +8421,11 @@ impl<'a> Parser<'a> {
         Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
     }
 
-    /// Parse UseTreeKind::Nested(list)
+    /// Parses a `UseTreeKind::Nested(list)`.
     ///
+    /// ```
     /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
+    /// ```
     fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
         self.parse_unspanned_seq(&token::OpenDelim(token::Brace),
                                  &token::CloseDelim(token::Brace),
@@ -8368,15 +8442,17 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parses a source module as a crate. This is the main
-    /// entry point for the parser.
+    /// Parses a source module as a crate. This is the main entry point for the parser.
     pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
         let lo = self.span;
-        Ok(ast::Crate {
+        let krate = Ok(ast::Crate {
             attrs: self.parse_inner_attributes()?,
             module: self.parse_mod_items(&token::Eof, lo)?,
             span: lo.to(self.span),
-        })
+        });
+        emit_unclosed_delims(&self.unclosed_delims, self.diagnostic());
+        self.unclosed_delims.clear();
+        krate
     }
 
     pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
@@ -8405,3 +8481,20 @@ impl<'a> Parser<'a> {
         }
     }
 }
+
+pub fn emit_unclosed_delims(unclosed_delims: &[UnmatchedBrace], handler: &errors::Handler) {
+    for unmatched in unclosed_delims {
+        let mut err = handler.struct_span_err(unmatched.found_span, &format!(
+            "incorrect close delimiter: `{}`",
+            pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+        ));
+        err.span_label(unmatched.found_span, "incorrect close delimiter");
+        if let Some(sp) = unmatched.candidate_span {
+            err.span_label(sp, "close delimiter possibly meant for this");
+        }
+        if let Some(sp) = unmatched.unclosed_span {
+            err.span_label(sp, "un-closed delimiter");
+        }
+        err.emit();
+    }
+}
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index f06e975a6d9..ff7f3e0bfae 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -1,22 +1,27 @@
-pub use self::BinOpToken::*;
-pub use self::Nonterminal::*;
-pub use self::DelimToken::*;
-pub use self::Lit::*;
-pub use self::Token::*;
-
-use ast::{self};
-use parse::ParseSess;
-use print::pprust;
-use ptr::P;
+pub use BinOpToken::*;
+pub use Nonterminal::*;
+pub use DelimToken::*;
+pub use Lit::*;
+pub use Token::*;
+
+use crate::ast::{self};
+use crate::parse::ParseSess;
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::symbol::keywords;
+use crate::syntax::parse::parse_stream_from_source_str;
+use crate::syntax::parse::parser::emit_unclosed_delims;
+use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
+
 use serialize::{Decodable, Decoder, Encodable, Encoder};
-use symbol::keywords;
-use syntax::parse::parse_stream_from_source_str;
-use syntax_pos::{self, Span, FileName};
 use syntax_pos::symbol::{self, Symbol};
-use tokenstream::{self, DelimSpan, TokenStream, TokenTree};
+use syntax_pos::{self, Span, FileName};
+use log::info;
 
 use std::{cmp, fmt};
 use std::mem;
+#[cfg(target_arch = "x86_64")]
+use rustc_data_structures::static_assert;
 use rustc_data_structures::sync::{Lrc, Lock};
 
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@@ -33,16 +38,16 @@ pub enum BinOpToken {
     Shr,
 }
 
-/// A delimiter token
+/// A delimiter token.
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum DelimToken {
-    /// A round parenthesis: `(` or `)`
+    /// A round parenthesis (i.e., `(` or `)`).
     Paren,
-    /// A square bracket: `[` or `]`
+    /// A square bracket (i.e., `[` or `]`).
     Bracket,
-    /// A curly brace: `{` or `}`
+    /// A curly brace (i.e., `{` or `}`).
     Brace,
-    /// An empty delimiter
+    /// An empty delimiter.
     NoDelim,
 }
 
@@ -167,9 +172,9 @@ pub enum Token {
     Question,
     /// Used by proc macros for representing lifetimes, not generated by lexer right now.
     SingleQuote,
-    /// An opening delimiter, eg. `{`
+    /// An opening delimiter (e.g., `{`).
     OpenDelim(DelimToken),
-    /// A closing delimiter, eg. `}`
+    /// A closing delimiter (e.g., `}`).
     CloseDelim(DelimToken),
 
     /* Literals */
@@ -183,16 +188,16 @@ pub enum Token {
     // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
     Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
     // Can be expanded into several tokens.
-    /// Doc comment
+    /// A doc comment.
     DocComment(ast::Name),
 
     // Junk. These carry no data because we don't really care about the data
     // they *would* carry, and don't really want to allocate a new ident for
     // them. Instead, users could extract that from the associated span.
 
-    /// Whitespace
+    /// Whitespace.
     Whitespace,
-    /// Comment
+    /// A comment.
     Comment,
     Shebang(ast::Name),
 
@@ -275,6 +280,20 @@ impl Token {
         }
     }
 
+    /// Returns `true` if the token can appear at the start of a const param.
+    pub fn can_begin_const_arg(&self) -> bool {
+        match self {
+            OpenDelim(Brace) => true,
+            Interpolated(ref nt) => match nt.0 {
+                NtExpr(..) => true,
+                NtBlock(..) => true,
+                NtLiteral(..) => true,
+                _ => false,
+            }
+            _ => self.can_begin_literal_or_bool(),
+        }
+    }
+
     /// Returns `true` if the token can appear at the start of a generic bound.
     crate fn can_begin_bound(&self) -> bool {
         self.is_path_start() || self.is_lifetime() || self.is_keyword(keywords::For) ||
@@ -289,7 +308,7 @@ impl Token {
         }
     }
 
-    /// Returns `true` if the token is any literal, a minus (which can follow a literal,
+    /// Returns `true` if the token is any literal, a minus (which can prefix a literal,
     /// for example a '-42', or one of the boolean idents).
     crate fn can_begin_literal_or_bool(&self) -> bool {
         match *self {
@@ -483,8 +502,8 @@ impl Token {
     /// Enables better error recovery when the wrong token is found.
     crate fn similar_tokens(&self) -> Option<Vec<Token>> {
         match *self {
-            Comma => Some(vec![Dot, Lt]),
-            Semi => Some(vec![Colon]),
+            Comma => Some(vec![Dot, Lt, Semi]),
+            Semi => Some(vec![Colon, Comma]),
             _ => None
         }
     }
@@ -541,7 +560,10 @@ impl Token {
             // FIXME(#43081): Avoid this pretty-print + reparse hack
             let source = pprust::token_to_string(self);
             let filename = FileName::macro_expansion_source_code(&source);
-            parse_stream_from_source_str(filename, source, sess, Some(span))
+            let (tokens, errors) = parse_stream_from_source_str(
+                filename, source, sess, Some(span));
+            emit_unclosed_delims(&errors, &sess.span_diagnostic);
+            tokens
         });
 
         // During early phases of the compiler the AST could get modified
@@ -683,7 +705,7 @@ impl PartialEq for Nonterminal {
 }
 
 impl fmt::Debug for Nonterminal {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             NtItem(..) => f.pad("NtItem(..)"),
             NtBlock(..) => f.pad("NtBlock(..)"),
@@ -729,13 +751,13 @@ impl PartialEq for LazyTokenStream {
 }
 
 impl fmt::Debug for LazyTokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Debug::fmt(&self.clone().0.into_inner(), f)
     }
 }
 
 impl LazyTokenStream {
-    fn new() -> Self {
+    pub fn new() -> Self {
         LazyTokenStream(Lock::new(None))
     }
 
@@ -782,12 +804,13 @@ fn prepend_attrs(sess: &ParseSess,
         let source = pprust::attr_to_string(attr);
         let macro_filename = FileName::macro_expansion_source_code(&source);
         if attr.is_sugared_doc {
-            let stream = parse_stream_from_source_str(
+            let (stream, errors) = parse_stream_from_source_str(
                 macro_filename,
                 source,
                 sess,
                 Some(span),
             );
+            emit_unclosed_delims(&errors, &sess.span_diagnostic);
             builder.push(stream);
             continue
         }
@@ -804,12 +827,13 @@ fn prepend_attrs(sess: &ParseSess,
         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
         } else {
-            let stream = parse_stream_from_source_str(
+            let (stream, errors) = parse_stream_from_source_str(
                 macro_filename,
                 source,
                 sess,
                 Some(span),
             );
+            emit_unclosed_delims(&errors, &sess.span_diagnostic);
             brackets.push(stream);
         }
 
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 5232b8333e6..d8a8cbb655b 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -1,10 +1,10 @@
 //! This pretty-printer is a direct reimplementation of Philip Karlton's
 //! Mesa pretty-printer, as described in appendix A of
 //!
-//! ````text
+//! ```text
 //! STAN-CS-79-770: "Pretty Printing", by Derek C. Oppen.
 //! Stanford Department of Computer Science, 1979.
-//! ````
+//! ```
 //!
 //! The algorithm's aim is to break a stream into as few lines as possible
 //! while respecting the indentation-consistency requirements of the enclosing
@@ -138,6 +138,7 @@ use std::collections::VecDeque;
 use std::fmt;
 use std::io;
 use std::borrow::Cow;
+use log::debug;
 
 /// How to break. Described in more detail in the module docs.
 #[derive(Clone, Copy, PartialEq)]
@@ -192,7 +193,7 @@ impl Token {
 }
 
 impl fmt::Display for Token {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             Token::String(ref s, len) => write!(f, "STR({},{})", s, len),
             Token::Break(_) => f.write_str("BREAK"),
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 7cecf4b9af7..cdf805176a2 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -1,21 +1,22 @@
+use crate::ast::{self, BlockCheckMode, PatKind, RangeEnd, RangeSyntax};
+use crate::ast::{SelfKind, GenericBound, TraitBoundModifier};
+use crate::ast::{Attribute, MacDelimiter, GenericArg};
+use crate::util::parser::{self, AssocOp, Fixity};
+use crate::attr;
+use crate::source_map::{self, SourceMap, Spanned};
+use crate::parse::token::{self, BinOpToken, Token};
+use crate::parse::lexer::comments;
+use crate::parse::{self, ParseSess};
+use crate::print::pp::{self, Breaks};
+use crate::print::pp::Breaks::{Consistent, Inconsistent};
+use crate::ptr::P;
+use crate::std_inject;
+use crate::symbol::keywords;
+use crate::tokenstream::{self, TokenStream, TokenTree};
+
 use rustc_target::spec::abi::{self, Abi};
-use ast::{self, BlockCheckMode, PatKind, RangeEnd, RangeSyntax};
-use ast::{SelfKind, GenericBound, TraitBoundModifier};
-use ast::{Attribute, MacDelimiter, GenericArg};
-use util::parser::{self, AssocOp, Fixity};
-use attr;
-use source_map::{self, SourceMap, Spanned};
 use syntax_pos::{self, BytePos};
-use parse::token::{self, BinOpToken, Token};
-use parse::lexer::comments;
-use parse::{self, ParseSess};
-use print::pp::{self, Breaks};
-use print::pp::Breaks::{Consistent, Inconsistent};
-use ptr::P;
-use std_inject;
-use symbol::keywords;
 use syntax_pos::{DUMMY_SP, FileName};
-use tokenstream::{self, TokenStream, TokenTree};
 
 use std::ascii;
 use std::borrow::Cow;
@@ -34,8 +35,8 @@ pub enum AnnNode<'a> {
 }
 
 pub trait PpAnn {
-    fn pre(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) }
-    fn post(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) }
+    fn pre(&self, _state: &mut State<'_>, _node: AnnNode<'_>) -> io::Result<()> { Ok(()) }
+    fn post(&self, _state: &mut State<'_>, _node: AnnNode<'_>) -> io::Result<()> { Ok(()) }
 }
 
 #[derive(Copy, Clone)]
@@ -150,7 +151,7 @@ impl<'a> State<'a> {
 }
 
 pub fn to_string<F>(f: F) -> String where
-    F: FnOnce(&mut State) -> io::Result<()>,
+    F: FnOnce(&mut State<'_>) -> io::Result<()>,
 {
     let mut wr = Vec::new();
     {
@@ -605,7 +606,7 @@ pub trait PrintState<'a> {
         match lit.node {
             ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
             ast::LitKind::Err(st) => {
-                let st = st.as_str().escape_debug();
+                let st = st.as_str().escape_debug().to_string();
                 let mut res = String::with_capacity(st.len() + 2);
                 res.push('\'');
                 res.push_str(&st);
@@ -969,7 +970,7 @@ impl<'a> State<'a> {
                                   elts: &[T],
                                   mut op: F,
                                   mut get_span: G) -> io::Result<()> where
-        F: FnMut(&mut State, &T) -> io::Result<()>,
+        F: FnMut(&mut State<'_>, &T) -> io::Result<()>,
         G: FnMut(&T) -> syntax_pos::Span,
     {
         self.rbox(0, b)?;
@@ -1024,6 +1025,7 @@ impl<'a> State<'a> {
         match generic_arg {
             GenericArg::Lifetime(lt) => self.print_lifetime(*lt),
             GenericArg::Type(ty) => self.print_type(ty),
+            GenericArg::Const(ct) => self.print_expr(&ct.value),
         }
     }
 
@@ -2928,7 +2930,7 @@ impl<'a> State<'a> {
                     s.print_outer_attributes_inline(&param.attrs)?;
                     let lt = ast::Lifetime { id: param.id, ident: param.ident };
                     s.print_lifetime_bounds(lt, &param.bounds)
-                },
+                }
                 ast::GenericParamKind::Type { ref default } => {
                     s.print_outer_attributes_inline(&param.attrs)?;
                     s.print_ident(param.ident)?;
@@ -2942,6 +2944,15 @@ impl<'a> State<'a> {
                         _ => Ok(())
                     }
                 }
+                ast::GenericParamKind::Const { ref ty } => {
+                    s.print_outer_attributes_inline(&param.attrs)?;
+                    s.word_space("const")?;
+                    s.print_ident(param.ident)?;
+                    s.s.space()?;
+                    s.word_space(":")?;
+                    s.print_type(ty)?;
+                    s.print_type_bounds(":", &param.bounds)
+                }
             }
         })?;
 
@@ -3210,10 +3221,10 @@ impl<'a> State<'a> {
 mod tests {
     use super::*;
 
-    use ast;
-    use source_map;
+    use crate::ast;
+    use crate::source_map;
+    use crate::with_globals;
     use syntax_pos;
-    use with_globals;
 
     #[test]
     fn test_fun_to_string() {
diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs
index 3effe53cd29..bc43630ae59 100644
--- a/src/libsyntax/ptr.rs
+++ b/src/libsyntax/ptr.rs
@@ -1,4 +1,4 @@
-//! The AST pointer
+//! The AST pointer.
 //!
 //! Provides `P<T>`, a frozen owned smart pointer, as a replacement for `@T` in
 //! the AST.
@@ -129,19 +129,19 @@ impl<T: 'static + Clone> Clone for P<T> {
 }
 
 impl<T: ?Sized + Debug> Debug for P<T> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         Debug::fmt(&self.ptr, f)
     }
 }
 
 impl<T: Display> Display for P<T> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         Display::fmt(&**self, f)
     }
 }
 
 impl<T> fmt::Pointer for P<T> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Pointer::fmt(&self.ptr, f)
     }
 }
diff --git a/src/libsyntax/show_span.rs b/src/libsyntax/show_span.rs
index 4228e0c87ce..2c32771266e 100644
--- a/src/libsyntax/show_span.rs
+++ b/src/libsyntax/show_span.rs
@@ -5,10 +5,10 @@
 
 use std::str::FromStr;
 
-use ast;
-use errors;
-use visit;
-use visit::Visitor;
+use crate::ast;
+use crate::errors;
+use crate::visit;
+use crate::visit::Visitor;
 
 enum Mode {
     Expression,
diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs
index 0a46d034558..1784bad0362 100644
--- a/src/libsyntax/source_map.rs
+++ b/src/libsyntax/source_map.rs
@@ -10,7 +10,7 @@
 
 pub use syntax_pos::*;
 pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo};
-pub use self::ExpnFormat::*;
+pub use ExpnFormat::*;
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::stable_hasher::StableHasher;
@@ -22,9 +22,11 @@ use std::path::{Path, PathBuf};
 use std::env;
 use std::fs;
 use std::io;
-use errors::SourceMapper;
+use log::debug;
 
-/// Return the span itself if it doesn't come from a macro expansion,
+use crate::errors::SourceMapper;
+
+/// Returns the span itself if it doesn't come from a macro expansion,
 /// otherwise return the call site span up to the `enclosing_sp` by
 /// following the `expn_info` chain.
 pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
@@ -60,7 +62,7 @@ pub trait FileLoader {
     /// Query the existence of a file.
     fn file_exists(&self, path: &Path) -> bool;
 
-    /// Return an absolute path to a file, if possible.
+    /// Returns an absolute path to a file, if possible.
     fn abs_path(&self, path: &Path) -> Option<PathBuf>;
 
     /// Read the contents of an UTF-8 file into memory.
@@ -167,7 +169,7 @@ impl SourceMap {
         Ok(self.new_source_file(filename, src))
     }
 
-    pub fn files(&self) -> MappedLockGuard<Vec<Lrc<SourceFile>>> {
+    pub fn files(&self) -> MappedLockGuard<'_, Vec<Lrc<SourceFile>>> {
         LockGuard::map(self.files.borrow(), |files| &mut files.source_files)
     }
 
@@ -396,7 +398,7 @@ impl SourceMap {
         }
     }
 
-    /// Returns `Some(span)`, a union of the lhs and rhs span.  The lhs must precede the rhs. If
+    /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If
     /// there are gaps between lhs and rhs, the resulting union will cross these gaps.
     /// For this to work, the spans have to be:
     ///
@@ -509,7 +511,7 @@ impl SourceMap {
         Ok(FileLines {file: lo.file, lines: lines})
     }
 
-    /// Extract the source surrounding the given `Span` using the `extract_source` function. The
+    /// Extracts the source surrounding the given `Span` using the `extract_source` function. The
     /// extract function takes three arguments: a string slice containing the source, an index in
     /// the slice for the beginning of the span and an index in the slice for the end of the span.
     fn span_to_source<F>(&self, sp: Span, extract_source: F) -> Result<String, SpanSnippetError>
@@ -559,7 +561,7 @@ impl SourceMap {
         }
     }
 
-    /// Return the source snippet as `String` corresponding to the given `Span`
+    /// Returns the source snippet as `String` corresponding to the given `Span`
     pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
         self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index]
                                                                 .to_string())
@@ -574,7 +576,7 @@ impl SourceMap {
         }
     }
 
-    /// Return the source snippet as `String` before the given `Span`
+    /// Returns the source snippet as `String` before the given `Span`
     pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
         self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string())
     }
@@ -1121,7 +1123,7 @@ mod tests {
 
     /// Given a string like " ~~~~~~~~~~~~ ", produces a span
     /// converting that range. The idea is that the string has the same
-    /// length as the input, and we uncover the byte positions.  Note
+    /// length as the input, and we uncover the byte positions. Note
     /// that this can span lines and so on.
     fn span_from_selection(input: &str, selection: &str) -> Span {
         assert_eq!(input.len(), selection.len());
@@ -1130,7 +1132,7 @@ mod tests {
         Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION)
     }
 
-    /// Test span_to_snippet and span_to_lines for a span converting 3
+    /// Tests span_to_snippet and span_to_lines for a span converting 3
     /// lines in the middle of a file.
     #[test]
     fn span_to_snippet_and_lines_spanning_multiple_lines() {
@@ -1173,7 +1175,7 @@ mod tests {
         assert_eq!(sstr, "blork.rs:2:1: 2:12");
     }
 
-    /// Test failing to merge two spans on different lines
+    /// Tests failing to merge two spans on different lines
     #[test]
     fn span_merging_fail() {
         let sm = SourceMap::new(FilePathMapping::empty());
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index e0770834421..b9758bd655c 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -1,14 +1,15 @@
-use ast;
-use attr;
+use crate::ast;
+use crate::attr;
+use crate::edition::Edition;
+use crate::ext::hygiene::{Mark, SyntaxContext};
+use crate::symbol::{Symbol, keywords};
+use crate::source_map::{ExpnInfo, MacroAttribute, dummy_spanned, hygiene, respan};
+use crate::ptr::P;
+use crate::tokenstream::TokenStream;
+
 use std::cell::Cell;
 use std::iter;
-use edition::Edition;
-use ext::hygiene::{Mark, SyntaxContext};
-use symbol::{Symbol, keywords};
 use syntax_pos::{DUMMY_SP, Span};
-use source_map::{ExpnInfo, MacroAttribute, dummy_spanned, hygiene, respan};
-use ptr::P;
-use tokenstream::TokenStream;
 
 /// Craft a span that will be ignored by the stability lint's
 /// call to source_map's `is_internal` check.
@@ -19,7 +20,9 @@ fn ignored_span(sp: Span) -> Span {
         call_site: DUMMY_SP,
         def_site: None,
         format: MacroAttribute(Symbol::intern("std_inject")),
-        allow_internal_unstable: true,
+        allow_internal_unstable: Some(vec![
+            Symbol::intern("prelude_import"),
+        ].into()),
         allow_internal_unsafe: false,
         local_inner_macros: false,
         edition: hygiene::default_edition(),
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index b352486e39a..f45bf034ba2 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -3,34 +3,35 @@
 #![allow(dead_code)]
 #![allow(unused_imports)]
 
-use self::HasTestSignature::*;
+use HasTestSignature::*;
 
 use std::iter;
 use std::slice;
 use std::mem;
 use std::vec;
-use attr::{self, HasAttrs};
+
+use log::debug;
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos};
 
-use source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned, respan};
-use errors;
-use config;
-use entry::{self, EntryPointType};
-use ext::base::{ExtCtxt, Resolver};
-use ext::build::AstBuilder;
-use ext::expand::ExpansionConfig;
-use ext::hygiene::{self, Mark, SyntaxContext};
-use fold::Folder;
-use feature_gate::Features;
-use util::move_map::MoveMap;
-use fold::{self, ExpectOne};
-use parse::{token, ParseSess};
-use print::pprust;
-use ast::{self, Ident};
-use ptr::P;
-use smallvec::SmallVec;
-use symbol::{self, Symbol, keywords};
-use ThinVec;
+use crate::attr::{self, HasAttrs};
+use crate::source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned, respan};
+use crate::errors;
+use crate::config;
+use crate::entry::{self, EntryPointType};
+use crate::ext::base::{ExtCtxt, Resolver};
+use crate::ext::build::AstBuilder;
+use crate::ext::expand::ExpansionConfig;
+use crate::ext::hygiene::{self, Mark, SyntaxContext};
+use crate::mut_visit::{*, ExpectOne};
+use crate::feature_gate::Features;
+use crate::util::map_in_place::MapInPlace;
+use crate::parse::{token, ParseSess};
+use crate::print::pprust;
+use crate::ast::{self, Ident};
+use crate::ptr::P;
+use crate::symbol::{self, Symbol, keywords};
+use crate::ThinVec;
 
 struct Test {
     span: Span,
@@ -57,9 +58,9 @@ struct TestCtxt<'a> {
 pub fn modify_for_testing(sess: &ParseSess,
                           resolver: &mut dyn Resolver,
                           should_test: bool,
-                          krate: ast::Crate,
+                          krate: &mut ast::Crate,
                           span_diagnostic: &errors::Handler,
-                          features: &Features) -> ast::Crate {
+                          features: &Features) {
     // Check for #[reexport_test_harness_main = "some_name"] which
     // creates a `use __test::main as some_name;`. This needs to be
     // unconditional, so that the attribute is still marked as used in
@@ -75,8 +76,6 @@ pub fn modify_for_testing(sess: &ParseSess,
     if should_test {
         generate_test_harness(sess, resolver, reexport_test_harness_main,
                               krate, span_diagnostic, features, test_runner)
-    } else {
-        krate
     }
 }
 
@@ -88,21 +87,20 @@ struct TestHarnessGenerator<'a> {
     tested_submods: Vec<(Ident, Ident)>,
 }
 
-impl<'a> fold::Folder for TestHarnessGenerator<'a> {
-    fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate {
-        let mut folded = fold::noop_fold_crate(c, self);
+impl<'a> MutVisitor for TestHarnessGenerator<'a> {
+    fn visit_crate(&mut self, c: &mut ast::Crate) {
+        noop_visit_crate(c, self);
 
         // Create a main function to run our tests
         let test_main = {
             let unresolved = mk_main(&mut self.cx);
-            self.cx.ext_cx.monotonic_expander().fold_item(unresolved).pop().unwrap()
+            self.cx.ext_cx.monotonic_expander().flat_map_item(unresolved).pop().unwrap()
         };
 
-        folded.module.items.push(test_main);
-        folded
+        c.module.items.push(test_main);
     }
 
-    fn fold_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+    fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
         let ident = i.ident;
         if ident.name != keywords::Invalid.name() {
             self.cx.path.push(ident);
@@ -123,16 +121,16 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
 
         // We don't want to recurse into anything other than mods, since
         // mods or tests inside of functions will break things
-        if let ast::ItemKind::Mod(module) = item.node {
+        if let ast::ItemKind::Mod(mut module) = item.node {
             let tests = mem::replace(&mut self.tests, Vec::new());
             let tested_submods = mem::replace(&mut self.tested_submods, Vec::new());
-            let mut mod_folded = fold::noop_fold_mod(module, self);
+            noop_visit_mod(&mut module, self);
             let tests = mem::replace(&mut self.tests, tests);
             let tested_submods = mem::replace(&mut self.tested_submods, tested_submods);
 
             if !tests.is_empty() || !tested_submods.is_empty() {
                 let (it, sym) = mk_reexport_mod(&mut self.cx, item.id, tests, tested_submods);
-                mod_folded.items.push(it);
+                module.items.push(it);
 
                 if !self.cx.path.is_empty() {
                     self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym));
@@ -141,7 +139,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
                     self.cx.toplevel_reexport = Some(sym);
                 }
             }
-            item.node = ast::ItemKind::Mod(mod_folded);
+            item.node = ast::ItemKind::Mod(module);
         }
         if ident.name != keywords::Invalid.name() {
             self.cx.path.pop();
@@ -149,7 +147,9 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
         smallvec![P(item)]
     }
 
-    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac }
+    fn visit_mac(&mut self, _mac: &mut ast::Mac) {
+        // Do nothing.
+    }
 }
 
 /// A folder used to remove any entry points (like fn main) because the harness
@@ -159,20 +159,20 @@ struct EntryPointCleaner {
     depth: usize,
 }
 
-impl fold::Folder for EntryPointCleaner {
-    fn fold_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+impl MutVisitor for EntryPointCleaner {
+    fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
         self.depth += 1;
-        let folded = fold::noop_fold_item(i, self).expect_one("noop did something");
+        let item = noop_flat_map_item(i, self).expect_one("noop did something");
         self.depth -= 1;
 
         // Remove any #[main] or #[start] from the AST so it doesn't
         // clash with the one we're going to add, but mark it as
         // #[allow(dead_code)] to avoid printing warnings.
-        let folded = match entry::entry_point_type(&folded, self.depth) {
+        let item = match entry::entry_point_type(&item, self.depth) {
             EntryPointType::MainNamed |
             EntryPointType::MainAttr |
             EntryPointType::Start =>
-                folded.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| {
+                item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| {
                     let allow_ident = Ident::from_str("allow");
                     let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code"));
                     let allow_dead_code_item = attr::mk_list_item(DUMMY_SP, allow_ident,
@@ -197,20 +197,22 @@ impl fold::Folder for EntryPointCleaner {
                     }
                 }),
             EntryPointType::None |
-            EntryPointType::OtherMain => folded,
+            EntryPointType::OtherMain => item,
         };
 
-        smallvec![folded]
+        smallvec![item]
     }
 
-    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac }
+    fn visit_mac(&mut self, _mac: &mut ast::Mac) {
+        // Do nothing.
+    }
 }
 
 /// Creates an item (specifically a module) that "pub use"s the tests passed in.
 /// Each tested submodule will contain a similar reexport module that we will export
 /// under the name of the original module. That is, `submod::__test_reexports` is
 /// reexported like so `pub use submod::__test_reexports as submod`.
-fn mk_reexport_mod(cx: &mut TestCtxt,
+fn mk_reexport_mod(cx: &mut TestCtxt<'_>,
                    parent: ast::NodeId,
                    tests: Vec<Ident>,
                    tested_submods: Vec<(Ident, Ident)>)
@@ -235,7 +237,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt,
     let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports"));
     let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
     cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
-    let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
+    let it = cx.ext_cx.monotonic_expander().flat_map_item(P(ast::Item {
         ident: sym,
         attrs: Vec::new(),
         id: ast::DUMMY_NODE_ID,
@@ -252,13 +254,13 @@ fn mk_reexport_mod(cx: &mut TestCtxt,
 fn generate_test_harness(sess: &ParseSess,
                          resolver: &mut dyn Resolver,
                          reexport_test_harness_main: Option<Symbol>,
-                         krate: ast::Crate,
+                         krate: &mut ast::Crate,
                          sd: &errors::Handler,
                          features: &Features,
-                         test_runner: Option<ast::Path>) -> ast::Crate {
+                         test_runner: Option<ast::Path>) {
     // Remove the entry points
     let mut cleaner = EntryPointCleaner { depth: 0 };
-    let krate = cleaner.fold_crate(krate);
+    cleaner.visit_crate(krate);
 
     let mark = Mark::fresh(Mark::root());
 
@@ -283,7 +285,11 @@ fn generate_test_harness(sess: &ParseSess,
         call_site: DUMMY_SP,
         def_site: None,
         format: MacroAttribute(Symbol::intern("test_case")),
-        allow_internal_unstable: true,
+        allow_internal_unstable: Some(vec![
+            Symbol::intern("main"),
+            Symbol::intern("test"),
+            Symbol::intern("rustc_attrs"),
+        ].into()),
         allow_internal_unsafe: false,
         local_inner_macros: false,
         edition: hygiene::default_edition(),
@@ -293,13 +299,13 @@ fn generate_test_harness(sess: &ParseSess,
         cx,
         tests: Vec::new(),
         tested_submods: Vec::new(),
-    }.fold_crate(krate)
+    }.visit_crate(krate);
 }
 
 /// Craft a span that will be ignored by the stability lint's
 /// call to source_map's `is_internal` check.
 /// The expanded code calls some unstable functions in the test crate.
-fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
+fn ignored_span(cx: &TestCtxt<'_>, sp: Span) -> Span {
     sp.with_ctxt(cx.ctxt)
 }
 
@@ -318,7 +324,7 @@ enum BadTestSignature {
 
 /// Creates a function item for use as the main function of a test build.
 /// This function will call the `test_runner` as specified by the crate attribute
-fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
+fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
     // Writing this out by hand with 'ignored_span':
     //        pub fn main() {
     //            #![main]
@@ -398,7 +404,7 @@ fn path_name_i(idents: &[Ident]) -> String {
 
 /// Creates a slice containing every test like so:
 /// &[path::to::test1, path::to::test2]
-fn mk_tests_slice(cx: &TestCtxt) -> P<ast::Expr> {
+fn mk_tests_slice(cx: &TestCtxt<'_>) -> P<ast::Expr> {
     debug!("building test vector from {} tests", cx.test_cases.len());
     let ref ecx = cx.ext_cx;
 
@@ -410,7 +416,7 @@ fn mk_tests_slice(cx: &TestCtxt) -> P<ast::Expr> {
 }
 
 /// Creates a path from the top-level __test module to the test via __test_reexports
-fn visible_path(cx: &TestCtxt, path: &[Ident]) -> Vec<Ident>{
+fn visible_path(cx: &TestCtxt<'_>, path: &[Ident]) -> Vec<Ident>{
     let mut visible_path = vec![];
     match cx.toplevel_reexport {
         Some(id) => visible_path.push(id),
diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs
index 26b4762aaf3..add4d2bead1 100644
--- a/src/libsyntax/test_snippet.rs
+++ b/src/libsyntax/test_snippet.rs
@@ -1,6 +1,8 @@
-use source_map::{SourceMap, FilePathMapping};
-use errors::Handler;
-use errors::emitter::EmitterWriter;
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::Handler;
+use crate::errors::emitter::EmitterWriter;
+use crate::with_globals;
+
 use std::io;
 use std::io::prelude::*;
 use rustc_data_structures::sync::Lrc;
@@ -8,7 +10,6 @@ use std::str;
 use std::sync::{Arc, Mutex};
 use std::path::Path;
 use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan};
-use with_globals;
 
 /// Identify a position in the text by the Nth occurrence of a string.
 struct Position {
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index f5d2d6f18ee..c4f2cffb097 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -5,6 +5,7 @@
 //! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
 //!
 //! ## Ownership
+//!
 //! `TokenStreams` are persistent data structures constructed as ropes with reference
 //! counted-children. In general, this means that calling an operation on a `TokenStream`
 //! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
@@ -12,12 +13,15 @@
 //! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
 //! ownership of the original.
 
+use crate::ext::base;
+use crate::ext::tt::{macro_parser, quoted};
+use crate::parse::Directory;
+use crate::parse::token::{self, DelimToken, Token};
+use crate::print::pprust;
+
 use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
-use ext::base;
-use ext::tt::{macro_parser, quoted};
-use parse::Directory;
-use parse::token::{self, DelimToken, Token};
-use print::pprust;
+#[cfg(target_arch = "x86_64")]
+use rustc_data_structures::static_assert;
 use rustc_data_structures::sync::Lrc;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
 
@@ -46,7 +50,7 @@ pub enum TokenTree {
 
 impl TokenTree {
     /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: TokenStream)
+    pub fn parse(cx: &base::ExtCtxt<'_>, mtch: &[quoted::TokenTree], tts: TokenStream)
                  -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let directory = Directory {
@@ -56,7 +60,7 @@ impl TokenTree {
         macro_parser::parse(cx.parse_sess(), tts, mtch, Some(directory), true)
     }
 
-    /// Check if this TokenTree is equal to the other, regardless of span information.
+    /// Checks if this TokenTree is equal to the other, regardless of span information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
             (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
@@ -86,7 +90,7 @@ impl TokenTree {
         }
     }
 
-    /// Retrieve the TokenTree's span.
+    /// Retrieves the TokenTree's span.
     pub fn span(&self) -> Span {
         match *self {
             TokenTree::Token(sp, _) => sp,
@@ -147,7 +151,7 @@ impl TokenTree {
 /// empty stream is represented with `None`; it may be represented as a `Some`
 /// around an empty `Vec`.
 #[derive(Clone, Debug)]
-pub struct TokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
+pub struct TokenStream(pub Option<Lrc<Vec<TreeAndJoint>>>);
 
 pub type TreeAndJoint = (TokenTree, IsJoint);
 
@@ -161,7 +165,7 @@ pub enum IsJoint {
     NonJoint
 }
 
-use self::IsJoint::*;
+use IsJoint::*;
 
 impl TokenStream {
     /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
@@ -255,7 +259,13 @@ impl TokenStream {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
             _ => {
-                let mut vec = vec![];
+                // rust-lang/rust#57735: pre-allocate vector to avoid
+                // quadratic blow-up due to on-the-fly reallocations.
+                let tree_count = streams.iter()
+                    .map(|ts| match &ts.0 { None => 0, Some(s) => s.len() })
+                    .sum();
+                let mut vec = Vec::with_capacity(tree_count);
+
                 for stream in streams {
                     match stream.0 {
                         None => {},
@@ -486,7 +496,7 @@ impl Cursor {
 }
 
 impl fmt::Display for TokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.write_str(&pprust::tokens_to_string(self.clone()))
     }
 }
@@ -540,11 +550,11 @@ impl DelimSpan {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use syntax::ast::Ident;
-    use with_globals;
+    use crate::syntax::ast::Ident;
+    use crate::with_globals;
+    use crate::parse::token::Token;
+    use crate::util::parser_testing::string_to_stream;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
-    use parse::token::Token;
-    use util::parser_testing::string_to_stream;
 
     fn string_to_ts(string: &str) -> TokenStream {
         string_to_stream(string.to_owned())
diff --git a/src/libsyntax/util/lev_distance.rs b/src/libsyntax/util/lev_distance.rs
index d6d2251b5bc..2f150d22159 100644
--- a/src/libsyntax/util/lev_distance.rs
+++ b/src/libsyntax/util/lev_distance.rs
@@ -1,7 +1,7 @@
 use std::cmp;
-use symbol::Symbol;
+use crate::symbol::Symbol;
 
-/// Find the Levenshtein distance between two strings
+/// Finds the Levenshtein distance between two strings
 pub fn lev_distance(a: &str, b: &str) -> usize {
     // cases which don't require further computation
     if a.is_empty() {
@@ -32,7 +32,7 @@ pub fn lev_distance(a: &str, b: &str) -> usize {
     dcol[t_last + 1]
 }
 
-/// Find the best match for a given word in the given iterator
+/// Finds the best match for a given word in the given iterator
 ///
 /// As a loose rule to avoid the obviously incorrect suggestions, it takes
 /// an optional limit for the maximum allowable edit distance, which defaults
@@ -101,7 +101,7 @@ fn test_lev_distance() {
 
 #[test]
 fn test_find_best_match_for_name() {
-    use with_globals;
+    use crate::with_globals;
     with_globals(|| {
         let input = vec![Symbol::intern("aaab"), Symbol::intern("aaabc")];
         assert_eq!(
diff --git a/src/libsyntax/util/move_map.rs b/src/libsyntax/util/map_in_place.rs
index a0f9d39ce89..5724b540a0d 100644
--- a/src/libsyntax/util/move_map.rs
+++ b/src/libsyntax/util/map_in_place.rs
@@ -1,18 +1,18 @@
 use std::ptr;
 use smallvec::{Array, SmallVec};
 
-pub trait MoveMap<T>: Sized {
-    fn move_map<F>(self, mut f: F) -> Self where F: FnMut(T) -> T {
-        self.move_flat_map(|e| Some(f(e)))
+pub trait MapInPlace<T>: Sized {
+    fn map_in_place<F>(&mut self, mut f: F) where F: FnMut(T) -> T {
+        self.flat_map_in_place(|e| Some(f(e)))
     }
 
-    fn move_flat_map<F, I>(self, f: F) -> Self
+    fn flat_map_in_place<F, I>(&mut self, f: F)
         where F: FnMut(T) -> I,
               I: IntoIterator<Item=T>;
 }
 
-impl<T> MoveMap<T> for Vec<T> {
-    fn move_flat_map<F, I>(mut self, mut f: F) -> Self
+impl<T> MapInPlace<T> for Vec<T> {
+    fn flat_map_in_place<F, I>(&mut self, mut f: F)
         where F: FnMut(T) -> I,
               I: IntoIterator<Item=T>
     {
@@ -53,22 +53,11 @@ impl<T> MoveMap<T> for Vec<T> {
             // write_i tracks the number of actually written new items.
             self.set_len(write_i);
         }
-
-        self
-    }
-}
-
-impl<T> MoveMap<T> for ::ptr::P<[T]> {
-    fn move_flat_map<F, I>(self, f: F) -> Self
-        where F: FnMut(T) -> I,
-              I: IntoIterator<Item=T>
-    {
-        ::ptr::P::from_vec(self.into_vec().move_flat_map(f))
     }
 }
 
-impl<T, A: Array<Item = T>> MoveMap<T> for SmallVec<A> {
-    fn move_flat_map<F, I>(mut self, mut f: F) -> Self
+impl<T, A: Array<Item = T>> MapInPlace<T> for SmallVec<A> {
+    fn flat_map_in_place<F, I>(&mut self, mut f: F)
         where F: FnMut(T) -> I,
               I: IntoIterator<Item=T>
     {
@@ -109,7 +98,5 @@ impl<T, A: Array<Item = T>> MoveMap<T> for SmallVec<A> {
             // write_i tracks the number of actually written new items.
             self.set_len(write_i);
         }
-
-        self
     }
 }
diff --git a/src/libsyntax/util/node_count.rs b/src/libsyntax/util/node_count.rs
index 7dd213ae38b..521edac8f5f 100644
--- a/src/libsyntax/util/node_count.rs
+++ b/src/libsyntax/util/node_count.rs
@@ -1,7 +1,7 @@
 // Simply gives a rought count of the number of nodes in an AST.
 
-use visit::*;
-use ast::*;
+use crate::visit::*;
+use crate::ast::*;
 use syntax_pos::Span;
 
 pub struct NodeCounter {
@@ -69,7 +69,7 @@ impl<'ast> Visitor<'ast> for NodeCounter {
         self.count += 1;
         walk_generics(self, g)
     }
-    fn visit_fn(&mut self, fk: FnKind, fd: &FnDecl, s: Span, _: NodeId) {
+    fn visit_fn(&mut self, fk: FnKind<'_>, fd: &FnDecl, s: Span, _: NodeId) {
         self.count += 1;
         walk_fn(self, fk, fd, s)
     }
diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs
index 89d4e53b8d1..5f15ede7b0b 100644
--- a/src/libsyntax/util/parser.rs
+++ b/src/libsyntax/util/parser.rs
@@ -1,6 +1,6 @@
-use parse::token::{Token, BinOpToken};
-use symbol::keywords;
-use ast::{self, BinOpKind};
+use crate::parse::token::{Token, BinOpToken};
+use crate::symbol::keywords;
+use crate::ast::{self, BinOpKind};
 
 /// Associative operator with precedence.
 ///
@@ -70,9 +70,9 @@ pub enum Fixity {
 }
 
 impl AssocOp {
-    /// Create a new AssocOP from a token
+    /// Creates a new AssocOP from a token
     pub fn from_token(t: &Token) -> Option<AssocOp> {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *t {
             Token::BinOpEq(k) => Some(AssignOp(k)),
             Token::LArrow => Some(ObsoleteInPlace),
@@ -105,9 +105,9 @@ impl AssocOp {
         }
     }
 
-    /// Create a new AssocOp from ast::BinOpKind.
+    /// Creates a new AssocOp from ast::BinOpKind.
     pub fn from_ast_binop(op: BinOpKind) -> Self {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match op {
             BinOpKind::Lt => Less,
             BinOpKind::Gt => Greater,
@@ -132,7 +132,7 @@ impl AssocOp {
 
     /// Gets the precedence of this operator
     pub fn precedence(&self) -> usize {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *self {
             As | Colon => 14,
             Multiply | Divide | Modulus => 13,
@@ -152,7 +152,7 @@ impl AssocOp {
 
     /// Gets the fixity of this operator
     pub fn fixity(&self) -> Fixity {
-        use self::AssocOp::*;
+        use AssocOp::*;
         // NOTE: it is a bug to have an operators that has same precedence but different fixities!
         match *self {
             ObsoleteInPlace | Assign | AssignOp(_) => Fixity::Right,
@@ -164,7 +164,7 @@ impl AssocOp {
     }
 
     pub fn is_comparison(&self) -> bool {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *self {
             Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual => true,
             ObsoleteInPlace | Assign | AssignOp(_) | As | Multiply | Divide | Modulus | Add |
@@ -174,7 +174,7 @@ impl AssocOp {
     }
 
     pub fn is_assign_like(&self) -> bool {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *self {
             Assign | AssignOp(_) | ObsoleteInPlace => true,
             Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual | As | Multiply | Divide |
@@ -184,7 +184,7 @@ impl AssocOp {
     }
 
     pub fn to_ast_binop(&self) -> Option<BinOpKind> {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *self {
             Less => Some(BinOpKind::Lt),
             Greater => Some(BinOpKind::Gt),
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index d0b3cd865ce..733c4f83e37 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -1,18 +1,22 @@
-use ast::{self, Ident};
-use source_map::FilePathMapping;
-use parse::{ParseSess, PResult, source_file_to_stream};
-use parse::{lexer, new_parser_from_source_str};
-use parse::parser::Parser;
-use ptr::P;
-use tokenstream::TokenStream;
+use crate::ast::{self, Ident};
+use crate::source_map::FilePathMapping;
+use crate::parse::{ParseSess, PResult, source_file_to_stream};
+use crate::parse::{lexer, new_parser_from_source_str};
+use crate::parse::parser::Parser;
+use crate::ptr::P;
+use crate::tokenstream::TokenStream;
+
 use std::iter::Peekable;
 use std::path::PathBuf;
 
 /// Map a string to tts, using a made-up filename:
 pub fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new(FilePathMapping::empty());
-    source_file_to_stream(&ps, ps.source_map()
-                             .new_source_file(PathBuf::from("bogofile").into(), source_str), None)
+    source_file_to_stream(
+        &ps,
+        ps.source_map().new_source_file(PathBuf::from("bogofile").into(),
+        source_str,
+    ), None).0
 }
 
 /// Map string to parser (via tts)
@@ -62,7 +66,7 @@ pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
     })
 }
 
-/// Convert a vector of strings to a vector of Ident's
+/// Converts a vector of strings to a vector of Ident's
 pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<Ident> {
     ids.iter().map(|u| Ident::from_str(*u)).collect()
 }
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 8cbd47ca70f..a002394c710 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -6,17 +6,18 @@
 //! Note: it is an important invariant that the default visitor walks the body
 //! of a function in "execution order" (more concretely, reverse post-order
 //! with respect to the CFG implied by the AST), meaning that if AST node A may
-//! execute before AST node B, then A is visited first.  The borrow checker in
+//! execute before AST node B, then A is visited first. The borrow checker in
 //! particular relies on this property.
 //!
 //! Note: walking an AST before macro expansion is probably a bad idea. For
 //! instance, a walker looking for item names in a module will miss all of
 //! those that are created by the expansion of a macro.
 
-use ast::*;
+use crate::ast::*;
+use crate::parse::token::Token;
+use crate::tokenstream::{TokenTree, TokenStream};
+
 use syntax_pos::Span;
-use parse::token::Token;
-use tokenstream::{TokenTree, TokenStream};
 
 #[derive(Copy, Clone)]
 pub enum FnKind<'a> {
@@ -31,12 +32,12 @@ pub enum FnKind<'a> {
 }
 
 /// Each method of the Visitor trait is a hook to be potentially
-/// overridden.  Each method's default implementation recursively visits
+/// overridden. Each method's default implementation recursively visits
 /// the substructure of the input via the corresponding `walk` method;
 /// e.g., the `visit_mod` method by default calls `visit::walk_mod`.
 ///
 /// If you want to ensure that your code handles every variant
-/// explicitly, you need to override each method.  (And you also need
+/// explicitly, you need to override each method. (And you also need
 /// to monitor future changes to `Visitor` in case a new method with a
 /// new default implementation gets introduced.)
 pub trait Visitor<'ast>: Sized {
@@ -125,6 +126,7 @@ pub trait Visitor<'ast>: Sized {
         match generic_arg {
             GenericArg::Lifetime(lt) => self.visit_lifetime(lt),
             GenericArg::Type(ty) => self.visit_ty(ty),
+            GenericArg::Const(ct) => self.visit_anon_const(ct),
         }
     }
     fn visit_assoc_type_binding(&mut self, type_binding: &'ast TypeBinding) {
@@ -485,6 +487,7 @@ pub fn walk_generic_param<'a, V: Visitor<'a>>(visitor: &mut V, param: &'a Generi
     match param.kind {
         GenericParamKind::Lifetime => {}
         GenericParamKind::Type { ref default } => walk_list!(visitor, visit_ty, default),
+        GenericParamKind::Const { ref ty, .. } => visitor.visit_ty(ty),
     }
 }