Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                  |  26
-rw-r--r--  src/libsyntax/attr/builtin.rs         | 202
-rw-r--r--  src/libsyntax/attr/mod.rs             |  14
-rw-r--r--  src/libsyntax/config.rs               |   4
-rw-r--r--  src/libsyntax/diagnostic_list.rs      |   8
-rw-r--r--  src/libsyntax/ext/base.rs             |  20
-rw-r--r--  src/libsyntax/ext/build.rs            |  10
-rw-r--r--  src/libsyntax/ext/expand.rs           |  43
-rw-r--r--  src/libsyntax/ext/quote.rs            |   6
-rw-r--r--  src/libsyntax/ext/source_util.rs      |   2
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs  | 114
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs   | 174
-rw-r--r--  src/libsyntax/ext/tt/quoted.rs        |   2
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs    |   4
-rw-r--r--  src/libsyntax/feature_gate.rs         |  60
-rw-r--r--  src/libsyntax/fold.rs                 |   8
-rw-r--r--  src/libsyntax/json.rs                 |  18
-rw-r--r--  src/libsyntax/lib.rs                  |  17
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs      | 131
-rw-r--r--  src/libsyntax/parse/mod.rs            |  83
-rw-r--r--  src/libsyntax/parse/parser.rs         | 160
-rw-r--r--  src/libsyntax/parse/token.rs          |  14
-rw-r--r--  src/libsyntax/print/pprust.rs         |   2
-rw-r--r--  src/libsyntax/source_map.rs           | 197
-rw-r--r--  src/libsyntax/test.rs                 |   2
-rw-r--r--  src/libsyntax/test_snippet.rs         |   8
-rw-r--r--  src/libsyntax/tokenstream.rs          |   2
-rw-r--r--  src/libsyntax/util/parser_testing.rs  |   2
28 files changed, 867 insertions(+), 466 deletions(-)
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 9ed628e2ed3..808e19d6f12 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -20,6 +20,8 @@ use print::pprust;
 use ptr::P;
 use rustc_data_structures::indexed_vec;
 use rustc_data_structures::indexed_vec::Idx;
+#[cfg(target_arch = "x86_64")]
+use rustc_data_structures::static_assert;
 use rustc_target::spec::abi::Abi;
 use source_map::{dummy_spanned, respan, Spanned};
 use symbol::{keywords, Symbol};
@@ -129,6 +131,8 @@ pub struct PathSegment {
     /// The identifier portion of this path segment.
     pub ident: Ident,
 
+    pub id: NodeId,
+
     /// Type/lifetime parameters attached to this path. They come in
     /// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`.
     /// `None` means that no parameter list is supplied (`Path`),
@@ -140,7 +144,7 @@ pub struct PathSegment {
 
 impl PathSegment {
     pub fn from_ident(ident: Ident) -> Self {
-        PathSegment { ident, args: None }
+        PathSegment { ident, id: DUMMY_NODE_ID, args: None }
     }
     pub fn crate_root(span: Span) -> Self {
         PathSegment::from_ident(Ident::new(keywords::CrateRoot.name(), span))
@@ -286,9 +290,9 @@ pub enum TraitBoundModifier {
 }
 
 /// The AST represents all type param bounds as types.
-/// typeck::collect::compute_bounds matches these against
-/// the "special" built-in traits (see middle::lang_items) and
-/// detects Copy, Send and Sync.
+/// `typeck::collect::compute_bounds` matches these against
+/// the "special" built-in traits (see `middle::lang_items`) and
+/// detects `Copy`, `Send` and `Sync`.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum GenericBound {
     Trait(PolyTraitRef, TraitBoundModifier),
@@ -922,6 +926,10 @@ pub struct Expr {
     pub attrs: ThinVec<Attribute>,
 }
 
+// `Expr` is used a lot. Make sure it doesn't unintentionally get bigger.
+#[cfg(target_arch = "x86_64")]
+static_assert!(MEM_SIZE_OF_EXPR: std::mem::size_of::<Expr>() == 88);
+
 impl Expr {
     /// Whether this expression would be valid somewhere that expects a value, for example, an `if`
     /// condition.
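As a minimal sketch of what `static_assert!` does (a hand-rolled macro assuming only std, not the real `rustc_data_structures` one), a compile-time check can be encoded as a const array whose length underflows when the condition is false:

```rust
// Minimal sketch, assuming only std: `0 - 1` underflows during constant
// evaluation when the condition is false, so compilation fails.
macro_rules! static_assert {
    ($name:ident: $test:expr) => {
        #[allow(dead_code)]
        const $name: [(); 0 - !($test) as usize] = [(); 0 - !($test) as usize];
    };
}

struct Expr {
    payload: [u8; 88], // stand-in for the real fields, for illustration only
}

static_assert!(MEM_SIZE_OF_EXPR: std::mem::size_of::<Expr>() == 88);

fn main() {
    println!("Expr is {} bytes", std::mem::size_of::<Expr>());
}
```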
@@ -1084,7 +1092,7 @@ pub enum ExprKind {
     /// A unary operation (For example: `!x`, `*x`)
     Unary(UnOp, P<Expr>),
     /// A literal (For example: `1`, `"foo"`)
-    Lit(P<Lit>),
+    Lit(Lit),
     /// A cast (`foo as f64`)
     Cast(P<Expr>, P<Ty>),
     Type(P<Expr>, P<Ty>),
@@ -1326,6 +1334,14 @@ impl LitKind {
         }
     }
 
+    /// Returns true if this literal is a byte string literal, false otherwise.
+    pub fn is_bytestr(&self) -> bool {
+        match self {
+            LitKind::ByteStr(_) => true,
+            _ => false,
+        }
+    }
+
     /// Returns true if this is a numeric literal.
     pub fn is_numeric(&self) -> bool {
         match *self {
diff --git a/src/libsyntax/attr/builtin.rs b/src/libsyntax/attr/builtin.rs
index 1cc2e62a9c6..1bbc1accc07 100644
--- a/src/libsyntax/attr/builtin.rs
+++ b/src/libsyntax/attr/builtin.rs
@@ -24,10 +24,11 @@ enum AttrError {
     MissingSince,
     MissingFeature,
     MultipleStabilityLevels,
-    UnsupportedLiteral
+    UnsupportedLiteral(&'static str, /* is_bytestr */ bool),
 }
 
-fn handle_errors(diag: &Handler, span: Span, error: AttrError) {
+fn handle_errors(sess: &ParseSess, span: Span, error: AttrError) {
+    let diag = &sess.span_diagnostic;
     match error {
         AttrError::MultipleItem(item) => span_err!(diag, span, E0538,
                                                    "multiple '{}' items", item),
@@ -44,7 +45,23 @@ fn handle_errors(diag: &Handler, span: Span, error: AttrError) {
         AttrError::MissingFeature => span_err!(diag, span, E0546, "missing 'feature'"),
         AttrError::MultipleStabilityLevels => span_err!(diag, span, E0544,
                                                         "multiple stability levels"),
-        AttrError::UnsupportedLiteral => span_err!(diag, span, E0565, "unsupported literal"),
+        AttrError::UnsupportedLiteral(
+            msg,
+            is_bytestr,
+        ) => {
+            let mut err = struct_span_err!(diag, span, E0565, "{}", msg);
+            if is_bytestr {
+                if let Ok(lint_str) = sess.source_map().span_to_snippet(span) {
+                    err.span_suggestion_with_applicability(
+                        span,
+                        "consider removing the prefix",
+                        format!("{}", &lint_str[1..]),
+                        Applicability::MaybeIncorrect,
+                    );
+                }
+            }
+            err.emit();
+        }
     }
 }
 
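The suggestion above is built by slicing the leading `b` off the offending snippet (`&lint_str[1..]`); a standalone illustration of that string manipulation:

```rust
// Dropping the first byte of a byte-string snippet like `b"foo"` yields the
// suggested plain-string replacement `"foo"`, mirroring `&lint_str[1..]`.
fn main() {
    let lint_str = r#"b"foo""#;
    let suggestion = &lint_str[1..];
    assert_eq!(suggestion, r#""foo""#);
    println!("consider removing the prefix: {}", suggestion);
}
```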
@@ -161,12 +178,12 @@ pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool {
 }
 
 /// Find the first stability attribute. `None` if none exists.
-pub fn find_stability(diagnostic: &Handler, attrs: &[Attribute],
+pub fn find_stability(sess: &ParseSess, attrs: &[Attribute],
                       item_sp: Span) -> Option<Stability> {
-    find_stability_generic(diagnostic, attrs.iter(), item_sp)
+    find_stability_generic(sess, attrs.iter(), item_sp)
 }
 
-fn find_stability_generic<'a, I>(diagnostic: &Handler,
+fn find_stability_generic<'a, I>(sess: &ParseSess,
                                  attrs_iter: I,
                                  item_sp: Span)
                                  -> Option<Stability>
@@ -178,6 +195,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
     let mut rustc_depr: Option<RustcDeprecation> = None;
     let mut rustc_const_unstable: Option<Symbol> = None;
     let mut promotable = false;
+    let diagnostic = &sess.span_diagnostic;
 
     'outer: for attr in attrs_iter {
         if ![
@@ -202,7 +220,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
             let meta = meta.as_ref().unwrap();
             let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                 if item.is_some() {
-                    handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
+                    handle_errors(sess, meta.span, AttrError::MultipleItem(meta.name()));
                     return false
                 }
                 if let Some(v) = meta.value_str() {
@@ -229,14 +247,22 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                                 _ => {
                                     let expected = &[ $( stringify!($name) ),+ ];
                                     handle_errors(
-                                        diagnostic,
+                                        sess,
                                         mi.span,
-                                        AttrError::UnknownMetaItem(mi.name(), expected));
+                                        AttrError::UnknownMetaItem(mi.name(), expected),
+                                    );
                                     continue 'outer
                                 }
                             }
                         } else {
-                            handle_errors(diagnostic, meta.span, AttrError::UnsupportedLiteral);
+                            handle_errors(
+                                sess,
+                                meta.span,
+                                AttrError::UnsupportedLiteral(
+                                    "unsupported literal",
+                                    false,
+                                ),
+                            );
                             continue 'outer
                         }
                     }
@@ -261,7 +287,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                             })
                         }
                         (None, _) => {
-                            handle_errors(diagnostic, attr.span(), AttrError::MissingSince);
+                            handle_errors(sess, attr.span(), AttrError::MissingSince);
                             continue
                         }
                         _ => {
@@ -287,7 +313,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                 }
                 "unstable" => {
                     if stab.is_some() {
-                        handle_errors(diagnostic, attr.span(), AttrError::MultipleStabilityLevels);
+                        handle_errors(sess, attr.span(), AttrError::MultipleStabilityLevels);
                         break
                     }
 
@@ -302,7 +328,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                                 "issue" => if !get(mi, &mut issue) { continue 'outer },
                                 _ => {
                                     handle_errors(
-                                        diagnostic,
+                                        sess,
                                         meta.span,
                                         AttrError::UnknownMetaItem(
                                             mi.name(),
@@ -313,7 +339,14 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                                 }
                             }
                         } else {
-                            handle_errors(diagnostic, meta.span, AttrError::UnsupportedLiteral);
+                            handle_errors(
+                                sess,
+                                meta.span,
+                                AttrError::UnsupportedLiteral(
+                                    "unsupported literal",
+                                    false,
+                                ),
+                            );
                             continue 'outer
                         }
                     }
@@ -340,7 +373,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                             })
                         }
                         (None, _, _) => {
-                            handle_errors(diagnostic, attr.span(), AttrError::MissingFeature);
+                            handle_errors(sess, attr.span(), AttrError::MissingFeature);
                             continue
                         }
                         _ => {
@@ -351,29 +384,41 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                 }
                 "stable" => {
                     if stab.is_some() {
-                        handle_errors(diagnostic, attr.span(), AttrError::MultipleStabilityLevels);
+                        handle_errors(sess, attr.span(), AttrError::MultipleStabilityLevels);
                         break
                     }
 
                     let mut feature = None;
                     let mut since = None;
                     for meta in metas {
-                        if let NestedMetaItemKind::MetaItem(ref mi) = meta.node {
-                            match &*mi.name().as_str() {
-                                "feature" => if !get(mi, &mut feature) { continue 'outer },
-                                "since" => if !get(mi, &mut since) { continue 'outer },
-                                _ => {
-                                    handle_errors(
-                                        diagnostic,
-                                        meta.span,
-                                        AttrError::UnknownMetaItem(mi.name(), &["since", "note"]),
-                                    );
-                                    continue 'outer
+                        match &meta.node {
+                            NestedMetaItemKind::MetaItem(mi) => {
+                                match &*mi.name().as_str() {
+                                    "feature" => if !get(mi, &mut feature) { continue 'outer },
+                                    "since" => if !get(mi, &mut since) { continue 'outer },
+                                    _ => {
+                                        handle_errors(
+                                            sess,
+                                            meta.span,
+                                            AttrError::UnknownMetaItem(
+                                                mi.name(), &["since", "note"],
+                                            ),
+                                        );
+                                        continue 'outer
+                                    }
                                 }
+                            },
+                            NestedMetaItemKind::Literal(lit) => {
+                                handle_errors(
+                                    sess,
+                                    lit.span,
+                                    AttrError::UnsupportedLiteral(
+                                        "unsupported literal",
+                                        false,
+                                    ),
+                                );
+                                continue 'outer
                             }
-                        } else {
-                            handle_errors(diagnostic, meta.span, AttrError::UnsupportedLiteral);
-                            continue 'outer
                         }
                     }
 
@@ -390,11 +435,11 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                             })
                         }
                         (None, _) => {
-                            handle_errors(diagnostic, attr.span(), AttrError::MissingFeature);
+                            handle_errors(sess, attr.span(), AttrError::MissingFeature);
                             continue
                         }
                         _ => {
-                            handle_errors(diagnostic, attr.span(), AttrError::MissingSince);
+                            handle_errors(sess, attr.span(), AttrError::MissingSince);
                             continue
                         }
                     }
@@ -462,7 +507,15 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat
                 error(cfg.span, "unexpected parentheses after `cfg` predicate key")
             }
             MetaItemKind::NameValue(lit) if !lit.node.is_str() => {
-                error(lit.span, "literal in `cfg` predicate value must be a string")
+                handle_errors(
+                    sess,
+                    lit.span,
+                    AttrError::UnsupportedLiteral(
+                        "literal in `cfg` predicate value must be a string",
+                        lit.node.is_bytestr()
+                    ),
+                );
+                true
             }
             MetaItemKind::NameValue(..) | MetaItemKind::Word => {
                 sess.config.contains(&(cfg.name(), cfg.value_str()))
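A hedged example of input this arm now reports through `handle_errors` (the rendering in the comments is an approximation):

```rust
// Fails to compile by design: a byte string is not a valid `cfg` value, and
// the E0565 error can now suggest dropping the `b` prefix.
#[cfg(feature = b"unstable")]
// error[E0565]: literal in `cfg` predicate value must be a string
//         help: consider removing the prefix: `"unstable"`
fn gated() {}

fn main() {}
```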
@@ -481,7 +534,14 @@ pub fn eval_condition<F>(cfg: &ast::MetaItem, sess: &ParseSess, eval: &mut F)
         ast::MetaItemKind::List(ref mis) => {
             for mi in mis.iter() {
                 if !mi.is_meta_item() {
-                    handle_errors(&sess.span_diagnostic, mi.span, AttrError::UnsupportedLiteral);
+                    handle_errors(
+                        sess,
+                        mi.span,
+                        AttrError::UnsupportedLiteral(
+                            "unsupported literal",
+                            false
+                        ),
+                    );
                     return false;
                 }
             }
@@ -523,18 +583,19 @@ pub struct Deprecation {
 }
 
 /// Find the deprecation attribute. `None` if none exists.
-pub fn find_deprecation(diagnostic: &Handler, attrs: &[Attribute],
+pub fn find_deprecation(sess: &ParseSess, attrs: &[Attribute],
                         item_sp: Span) -> Option<Deprecation> {
-    find_deprecation_generic(diagnostic, attrs.iter(), item_sp)
+    find_deprecation_generic(sess, attrs.iter(), item_sp)
 }
 
-fn find_deprecation_generic<'a, I>(diagnostic: &Handler,
+fn find_deprecation_generic<'a, I>(sess: &ParseSess,
                                    attrs_iter: I,
                                    item_sp: Span)
                                    -> Option<Deprecation>
     where I: Iterator<Item = &'a Attribute>
 {
     let mut depr: Option<Deprecation> = None;
+    let diagnostic = &sess.span_diagnostic;
 
     'outer: for attr in attrs_iter {
         if attr.path != "deprecated" {
@@ -551,14 +612,27 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler,
         depr = if let Some(metas) = attr.meta_item_list() {
             let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                 if item.is_some() {
-                    handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
+                    handle_errors(sess, meta.span, AttrError::MultipleItem(meta.name()));
                     return false
                 }
                 if let Some(v) = meta.value_str() {
                     *item = Some(v);
                     true
                 } else {
-                    span_err!(diagnostic, meta.span, E0551, "incorrect meta item");
+                    if let Some(lit) = meta.name_value_literal() {
+                        handle_errors(
+                            sess,
+                            lit.span,
+                            AttrError::UnsupportedLiteral(
+                                "literal in `deprecated` \
+                                value must be a string",
+                                lit.node.is_bytestr()
+                            ),
+                        );
+                    } else {
+                        span_err!(diagnostic, meta.span, E0551, "incorrect meta item");
+                    }
+
                     false
                 }
             };
@@ -566,22 +640,32 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler,
             let mut since = None;
             let mut note = None;
             for meta in metas {
-                if let NestedMetaItemKind::MetaItem(ref mi) = meta.node {
-                    match &*mi.name().as_str() {
-                        "since" => if !get(mi, &mut since) { continue 'outer },
-                        "note" => if !get(mi, &mut note) { continue 'outer },
-                        _ => {
-                            handle_errors(
-                                diagnostic,
-                                meta.span,
-                                AttrError::UnknownMetaItem(mi.name(), &["since", "note"]),
-                            );
-                            continue 'outer
+                match &meta.node {
+                    NestedMetaItemKind::MetaItem(mi) => {
+                        match &*mi.name().as_str() {
+                            "since" => if !get(mi, &mut since) { continue 'outer },
+                            "note" => if !get(mi, &mut note) { continue 'outer },
+                            _ => {
+                                handle_errors(
+                                    sess,
+                                    meta.span,
+                                    AttrError::UnknownMetaItem(mi.name(), &["since", "note"]),
+                                );
+                                continue 'outer
+                            }
                         }
                     }
-                } else {
-                    handle_errors(diagnostic, meta.span, AttrError::UnsupportedLiteral);
-                    continue 'outer
+                    NestedMetaItemKind::Literal(lit) => {
+                        handle_errors(
+                            sess,
+                            lit.span,
+                            AttrError::UnsupportedLiteral(
+                                "item in `deprecated` must be a key/value pair",
+                                false,
+                            ),
+                        );
+                        continue 'outer
+                    }
                 }
             }
 
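A hedged example of attributes the two new paths reject (error text taken from the strings above, rendering approximated):

```rust
// Both items fail to compile by design.
#[deprecated("use the new API")]
// error[E0565]: item in `deprecated` must be a key/value pair
fn old_api() {}

#[deprecated(note = b"use the new API")]
// error[E0565]: literal in `deprecated` value must be a string
//         help: consider removing the prefix: `"use the new API"`
fn older_api() {}

fn main() {}
```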
@@ -629,16 +713,24 @@ impl IntType {
 /// the same discriminant size that the corresponding C enum would or C
 /// structure layout, `packed` to remove padding, and `transparent` to delegate representation
 /// concerns to the only non-ZST field.
-pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec<ReprAttr> {
+pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
     use self::ReprAttr::*;
 
     let mut acc = Vec::new();
+    let diagnostic = &sess.span_diagnostic;
     if attr.path == "repr" {
         if let Some(items) = attr.meta_item_list() {
             mark_used(attr);
             for item in items {
                 if !item.is_meta_item() {
-                    handle_errors(diagnostic, item.span, AttrError::UnsupportedLiteral);
+                    handle_errors(
+                        sess,
+                        item.span,
+                        AttrError::UnsupportedLiteral(
+                            "meta item in `repr` must be an identifier",
+                            false,
+                        ),
+                    );
                     continue
                 }
 
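A hedged example of what the sharpened `repr` message covers:

```rust
// Fails to compile by design: a literal inside `repr(...)` now gets the more
// specific E0565 message.
#[repr("C")] // error[E0565]: meta item in `repr` must be an identifier
struct Wrapper {
    value: u32,
}

fn main() {}
```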
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index a980f3ab515..64876659477 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -219,6 +219,15 @@ impl MetaItem {
         name_from_path(&self.ident)
     }
 
+    // #[attribute(name = "value")]
+    //             ^^^^^^^^^^^^^^
+    pub fn name_value_literal(&self) -> Option<&Lit> {
+        match &self.node {
+            MetaItemKind::NameValue(v) => Some(v),
+            _ => None,
+        }
+    }
+
     pub fn value_str(&self) -> Option<Symbol> {
         match self.node {
             MetaItemKind::NameValue(ref v) => {
@@ -446,6 +455,11 @@ pub fn find_by_name<'a>(attrs: &'a [Attribute], name: &str) -> Option<&'a Attrib
     attrs.iter().find(|attr| attr.check_name(name))
 }
 
+pub fn filter_by_name<'a>(attrs: &'a [Attribute], name: &'a str)
+    -> impl Iterator<Item = &'a Attribute> {
+    attrs.iter().filter(move |attr| attr.check_name(name))
+}
+
 pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) -> Option<Symbol> {
     attrs.iter()
         .find(|at| at.check_name(name))
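`filter_by_name` yields every matching attribute where `find_by_name` stops at the first. A self-contained sketch of the same `impl Iterator` + `move`-closure pattern, with an illustrative `Attribute` type rather than rustc's:

```rust
// Returning `impl Iterator` with a `move` closure lets the borrowed `name`
// live as long as the iterator does.
#[derive(Debug)]
struct Attribute {
    name: String,
}

fn filter_by_name<'a>(attrs: &'a [Attribute], name: &'a str)
    -> impl Iterator<Item = &'a Attribute> {
    attrs.iter().filter(move |attr| attr.name == name)
}

fn main() {
    let attrs = vec![
        Attribute { name: "doc".into() },
        Attribute { name: "inline".into() },
        Attribute { name: "doc".into() },
    ];
    // Unlike a `find`-style helper, every matching attribute is yielded.
    assert_eq!(filter_by_name(&attrs, "doc").count(), 2);
}
```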
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs
index e611eb86dc1..d8fb20d4250 100644
--- a/src/libsyntax/config.rs
+++ b/src/libsyntax/config.rs
@@ -96,7 +96,7 @@ impl<'a> StripUnconfigured<'a> {
     /// when the configuration predicate is true, or otherwise expand into an
     /// empty list of attributes.
     ///
-    /// Gives a compiler warning when the `cfg_attr` contains no attribtes and
+    /// Gives a compiler warning when the `cfg_attr` contains no attributes and
     /// is in the original source file. Gives a compiler error if the syntax of
     /// the attribute is incorrect
     fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Vec<ast::Attribute> {
@@ -138,7 +138,7 @@ impl<'a> StripUnconfigured<'a> {
         };
 
         // Check feature gate and lint on zero attributes in source. Even if the feature is gated,
-        // we still compute as if it wasn't, since the emitted error will stop compilation futher
+        // we still compute as if it wasn't, since the emitted error will stop compilation further
         // along the compilation.
         match (expanded_attrs.len(), gate_cfg_attr_multi) {
             (0, false) => {
diff --git a/src/libsyntax/diagnostic_list.rs b/src/libsyntax/diagnostic_list.rs
index 5155ebbe19d..cd421de5f1d 100644
--- a/src/libsyntax/diagnostic_list.rs
+++ b/src/libsyntax/diagnostic_list.rs
@@ -375,15 +375,15 @@ and likely to change in the future.
 
 E0705: r##"
 A `#![feature]` attribute was declared for a feature that is stable in
-the current edition.
+the current edition, but not in all editions.
 
 Erroneous code example:
 
 ```ignore (limited to a warning during 2018 edition development)
 #![feature(rust_2018_preview)]
-#![feature(impl_header_lifetime_elision)] // error: the feature
-                                          // `impl_header_lifetime_elision` is
-                                          // included in the Rust 2018 edition
+#![feature(test_2018_feature)] // error: the feature
+                               // `test_2018_feature` is
+                               // included in the Rust 2018 edition
 ```
 
 "##,
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 5bf1a7dd663..88ee80e6088 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -247,8 +247,13 @@ impl<F> AttrProcMacro for F
 
 /// Represents a thing that maps token trees to Macro Results
 pub trait TTMacroExpander {
-    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
-                   -> Box<dyn MacResult+'cx>;
+    fn expand<'cx>(
+        &self,
+        ecx: &'cx mut ExtCtxt,
+        span: Span,
+        input: TokenStream,
+        def_span: Option<Span>,
+    ) -> Box<dyn MacResult+'cx>;
 }
 
 pub type MacroExpanderFn =
@@ -259,8 +264,13 @@ impl<F> TTMacroExpander for F
     where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
     -> Box<dyn MacResult+'cx>
 {
-    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
-                   -> Box<dyn MacResult+'cx> {
+    fn expand<'cx>(
+        &self,
+        ecx: &'cx mut ExtCtxt,
+        span: Span,
+        input: TokenStream,
+        _def_span: Option<Span>,
+    ) -> Box<dyn MacResult+'cx> {
         struct AvoidInterpolatedIdents;
 
         impl Folder for AvoidInterpolatedIdents {
@@ -481,7 +491,7 @@ impl DummyResult {
     pub fn raw_expr(sp: Span) -> P<ast::Expr> {
         P(ast::Expr {
             id: ast::DUMMY_NODE_ID,
-            node: ast::ExprKind::Lit(P(source_map::respan(sp, ast::LitKind::Bool(false)))),
+            node: ast::ExprKind::Lit(source_map::respan(sp, ast::LitKind::Bool(false))),
             span: sp,
             attrs: ThinVec::new(),
         })
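The new `def_span` parameter threads the macro's definition site through `expand` so later diagnostics can label it. A reduced sketch of the shape of the change, with stand-in types (`Span` here is just a byte range, not rustc's):

```rust
// Stand-in for syntax_pos::Span, for illustration only.
type Span = std::ops::Range<usize>;

trait TTMacroExpander {
    // `def_span` is optional: built-in expanders have no source definition.
    fn expand(&self, input: &str, def_span: Option<Span>) -> String;
}

struct MacroRulesExpander;

impl TTMacroExpander for MacroRulesExpander {
    fn expand(&self, input: &str, def_span: Option<Span>) -> String {
        match def_span {
            Some(sp) => format!("expanded `{}` (defined at {}..{})", input, sp.start, sp.end),
            None => format!("expanded `{}`", input),
        }
    }
}

fn main() {
    println!("{}", MacroRulesExpander.expand("m!(x)", Some(10..42)));
}
```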
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 6210003a40d..cacec867cf1 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -329,7 +329,11 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         } else {
             None
         };
-        segments.push(ast::PathSegment { ident: last_ident.with_span_pos(span), args });
+        segments.push(ast::PathSegment {
+            ident: last_ident.with_span_pos(span),
+            id: ast::DUMMY_NODE_ID,
+            args,
+        });
         let mut path = ast::Path { span, segments };
         if global {
             if let Some(seg) = path.make_root() {
@@ -366,7 +370,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         } else {
             None
         };
-        path.segments.push(ast::PathSegment { ident, args });
+        path.segments.push(ast::PathSegment { ident, id: ast::DUMMY_NODE_ID, args });
 
         (ast::QSelf {
             ty: self_type,
@@ -691,7 +695,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     }
 
     fn expr_lit(&self, sp: Span, lit: ast::LitKind) -> P<ast::Expr> {
-        self.expr(sp, ast::ExprKind::Lit(P(respan(sp, lit))))
+        self.expr(sp, ast::ExprKind::Lit(respan(sp, lit)))
     }
     fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
         self.expr_lit(span, ast::LitKind::Int(i as u128,
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 9e06384f5a8..cc8af70a050 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -252,7 +252,7 @@ impl Invocation {
 
 pub struct MacroExpander<'a, 'b:'a> {
     pub cx: &'a mut ExtCtxt<'b>,
-    monotonic: bool, // c.f. `cx.monotonic_expander()`
+    monotonic: bool, // cf. `cx.monotonic_expander()`
 }
 
 impl<'a, 'b> MacroExpander<'a, 'b> {
@@ -387,6 +387,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         add_derived_markers(&mut self.cx, item.span(), &traits, item.clone());
                     let derives = derives.entry(invoc.expansion_data.mark).or_default();
 
+                    derives.reserve(traits.len());
+                    invocations.reserve(traits.len());
                     for path in &traits {
                         let mark = Mark::fresh(self.cx.current_expansion.mark);
                         derives.push(mark);
@@ -687,7 +689,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         "proc_macro_hygiene",
                         self.span,
                         GateIssue::Language,
-                        &format!("procedural macros cannot expand to macro definitions"),
+                        "procedural macros cannot expand to macro definitions",
                     );
                 }
                 visit::walk_item(self, i);
@@ -764,7 +766,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                                                                     edition) {
                     dummy_span
                 } else {
-                    kind.make_from(expander.expand(self.cx, span, mac.node.stream()))
+                    kind.make_from(expander.expand(self.cx, span, mac.node.stream(), None))
                 }
             }
 
@@ -785,7 +787,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                                                                     edition) {
                     dummy_span
                 } else {
-                    kind.make_from(expander.expand(self.cx, span, mac.node.stream()))
+                    kind.make_from(expander.expand(
+                        self.cx,
+                        span,
+                        mac.node.stream(),
+                        def_info.map(|(_, s)| s),
+                    ))
                 }
             }
 
@@ -1025,7 +1032,7 @@ impl<'a> Parser<'a> {
                 }
             },
             AstFragmentKind::Ty => AstFragment::Ty(self.parse_ty()?),
-            AstFragmentKind::Pat => AstFragment::Pat(self.parse_pat()?),
+            AstFragmentKind::Pat => AstFragment::Pat(self.parse_pat(None)?),
         })
     }
 
@@ -1036,10 +1043,28 @@ impl<'a> Parser<'a> {
             // Avoid emitting backtrace info twice.
             let def_site_span = self.span.with_ctxt(SyntaxContext::empty());
             let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
-            let msg = format!("caused by the macro expansion here; the usage \
-                               of `{}!` is likely invalid in {} context",
-                               macro_path, kind_name);
-            err.span_note(span, &msg).emit();
+            err.span_label(span, "caused by the macro expansion here");
+            let msg = format!(
+                "the usage of `{}!` is likely invalid in {} context",
+                macro_path,
+                kind_name,
+            );
+            err.note(&msg);
+            let semi_span = self.sess.source_map().next_point(span);
+
+            let semi_full_span = semi_span.to(self.sess.source_map().next_point(semi_span));
+            match self.sess.source_map().span_to_snippet(semi_full_span) {
+                Ok(ref snippet) if &snippet[..] != ";" && kind_name == "expression" => {
+                    err.span_suggestion_with_applicability(
+                        semi_span,
+                        "you might be missing a semicolon here",
+                        ";".to_owned(),
+                        Applicability::MaybeIncorrect,
+                    );
+                }
+                _ => {}
+            }
+            err.emit();
         }
     }
 }
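A hedged example of the situation this diagnostic covers (messages approximated from the strings above):

```rust
// Fails to compile by design: `one!` expands to an expression, which cannot
// stand in item position, so expansion fails with (approximately):
//   note: the usage of `one!` is likely invalid in item context
macro_rules! one {
    () => { 1 + 1 };
}

one!(); // label: caused by the macro expansion here

fn main() {}
```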
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index 62bc9fae3b5..c6e0adbb5a4 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -274,7 +274,7 @@ pub mod rt {
             // FIXME: This is wrong
             P(ast::Expr {
                 id: ast::DUMMY_NODE_ID,
-                node: ast::ExprKind::Lit(P(self.clone())),
+                node: ast::ExprKind::Lit(self.clone()),
                 span: DUMMY_SP,
                 attrs: ThinVec::new(),
             }).to_tokens(cx)
@@ -305,7 +305,7 @@ pub mod rt {
                     let lit = ast::LitKind::Int(val as u128, ast::LitIntType::Signed($tag));
                     let lit = P(ast::Expr {
                         id: ast::DUMMY_NODE_ID,
-                        node: ast::ExprKind::Lit(P(dummy_spanned(lit))),
+                        node: ast::ExprKind::Lit(dummy_spanned(lit)),
                         span: DUMMY_SP,
                         attrs: ThinVec::new(),
                     });
@@ -419,7 +419,7 @@ pub fn parse_item_panic(parser: &mut Parser) -> Option<P<Item>> {
 }
 
 pub fn parse_pat_panic(parser: &mut Parser) -> P<Pat> {
-    panictry!(parser.parse_pat())
+    panictry!(parser.parse_pat(None))
 }
 
 pub fn parse_arm_panic(parser: &mut Parser) -> Arm {
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index f5d1bd6255e..e1ba8897a47 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -126,7 +126,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::T
         }
     }
 
-    Box::new(ExpandResult { p: p })
+    Box::new(ExpandResult { p })
 }
 
 // include_str! : read the given file, insert it as a literal string expr
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 03a8376e763..f31d80acbfa 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -107,12 +107,12 @@ use std::rc::Rc;
 /// Either a sequence of token trees or a single one. This is used as the representation of the
 /// sequence of tokens that make up a matcher.
 #[derive(Clone)]
-enum TokenTreeOrTokenTreeSlice<'a> {
+enum TokenTreeOrTokenTreeSlice<'tt> {
     Tt(TokenTree),
-    TtSeq(&'a [TokenTree]),
+    TtSeq(&'tt [TokenTree]),
 }
 
-impl<'a> TokenTreeOrTokenTreeSlice<'a> {
+impl<'tt> TokenTreeOrTokenTreeSlice<'tt> {
     /// Returns the number of constituent top-level token trees of `self` (top-level in that it
     /// will not recursively descend into subtrees).
     fn len(&self) -> usize {
@@ -122,7 +122,7 @@ impl<'a> TokenTreeOrTokenTreeSlice<'a> {
         }
     }
 
-    /// The the `index`-th token tree of `self`.
+    /// The `index`-th token tree of `self`.
     fn get_tt(&self, index: usize) -> TokenTree {
         match *self {
             TtSeq(ref v) => v[index].clone(),
@@ -136,21 +136,41 @@ impl<'a> TokenTreeOrTokenTreeSlice<'a> {
 /// This is used by `inner_parse_loop` to keep track of delimited submatchers that we have
 /// descended into.
 #[derive(Clone)]
-struct MatcherTtFrame<'a> {
+struct MatcherTtFrame<'tt> {
     /// The "parent" matcher that we are descending into.
-    elts: TokenTreeOrTokenTreeSlice<'a>,
+    elts: TokenTreeOrTokenTreeSlice<'tt>,
     /// The position of the "dot" in `elts` at the time we descended.
     idx: usize,
 }
 
-/// Represents a single "position" (aka "matcher position", aka "item"), as described in the module
-/// documentation.
+type NamedMatchVec = SmallVec<[NamedMatch; 4]>;
+
+/// Represents a single "position" (aka "matcher position", aka "item"), as
+/// described in the module documentation.
+///
+/// Here:
+///
+/// - `'root` represents the lifetime of the stack slot that holds the root
+///   `MatcherPos`. As described in `MatcherPosHandle`, the root `MatcherPos`
+///   structure is stored on the stack, but subsequent instances are put into
+///   the heap.
+/// - `'tt` represents the lifetime of the token trees that this matcher
+///   position refers to.
+///
+/// It is important to distinguish these two lifetimes because we have a
+/// `SmallVec<TokenTreeOrTokenTreeSlice<'tt>>` below, and the destructor of
+/// that is considered to possibly access the data from its elements (it lacks
+/// a `#[may_dangle]` attribute). As a result, the compiler needs to know that
+/// all the elements in that `SmallVec` strictly outlive the root stack slot
+/// lifetime. By separating `'tt` from `'root`, we can show that.
 #[derive(Clone)]
-struct MatcherPos<'a> {
+struct MatcherPos<'root, 'tt: 'root> {
     /// The token or sequence of tokens that make up the matcher
-    top_elts: TokenTreeOrTokenTreeSlice<'a>,
+    top_elts: TokenTreeOrTokenTreeSlice<'tt>,
+
     /// The position of the "dot" in this matcher
     idx: usize,
+
     /// The first span of source that the beginning of this matcher corresponds to. In other
     /// words, the token in the source whose span is `sp_open` is matched against the first token of
     /// the matcher.
@@ -168,7 +188,7 @@ struct MatcherPos<'a> {
     /// all bound matches from the submatcher into the shared top-level `matches` vector. If `sep`
     /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one
     /// wants the shared `matches`, one should use `up.matches`.
-    matches: Vec<Rc<Vec<NamedMatch>>>,
+    matches: Box<[Rc<NamedMatchVec>]>,
     /// The position in `matches` corresponding to the first metavar in this matcher's sequence of
     /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds
     /// to `matches[match_lo]`.
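A reduced sketch of the `'root`/`'tt` split described in the comment above, with stand-in field types:

```rust
// The token trees (`'tt`) must strictly outlive the stack slot holding the
// root position (`'root`); the `'tt: 'root` bound makes that explicit.
struct MatcherPos<'root, 'tt: 'root> {
    top_elts: &'tt [u8],
    up: Option<&'root mut MatcherPos<'root, 'tt>>,
}

fn main() {
    let tts = [1u8, 2, 3]; // outlives everything below
    let root = MatcherPos { top_elts: &tts, up: None };
    println!("{} top-level token trees", root.top_elts.len());
}
```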
@@ -180,26 +200,31 @@ struct MatcherPos<'a> {
     /// in this matcher.
     match_hi: usize,
 
-    // Specifically used if we are matching a repetition. If we aren't both should be `None`.
+    // The following fields are used if we are matching a repetition. If we aren't, they should be
+    // `None`.
+
     /// The KleeneOp of this sequence if we are in a repetition.
     seq_op: Option<quoted::KleeneOp>,
-    /// The separator if we are in a repetition
+
+    /// The separator if we are in a repetition.
     sep: Option<Token>,
+
     /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
     /// before we enter the sequence.
-    up: Option<MatcherPosHandle<'a>>,
+    up: Option<MatcherPosHandle<'root, 'tt>>,
 
-    // Specifically used to "unzip" token trees. By "unzip", we mean to unwrap the delimiters from
-    // a delimited token tree (e.g. something wrapped in `(` `)`) or to get the contents of a doc
-    // comment...
+    /// Specifically used to "unzip" token trees. By "unzip", we mean to unwrap the delimiters from
+    /// a delimited token tree (e.g. something wrapped in `(` `)`) or to get the contents of a doc
+    /// comment...
+    ///
     /// When matching against matchers with nested delimited submatchers (e.g. `pat ( pat ( .. )
     /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
     /// that where the bottom of the stack is the outermost matcher.
-    // Also, throughout the comments, this "descent" is often referred to as "unzipping"...
-    stack: Vec<MatcherTtFrame<'a>>,
+    /// Also, throughout the comments, this "descent" is often referred to as "unzipping"...
+    stack: SmallVec<[MatcherTtFrame<'tt>; 1]>,
 }
 
-impl<'a> MatcherPos<'a> {
+impl<'root, 'tt> MatcherPos<'root, 'tt> {
     /// Add `m` as a named match for the `idx`-th metavar.
     fn push_match(&mut self, idx: usize, m: NamedMatch) {
         let matches = Rc::make_mut(&mut self.matches[idx]);
@@ -216,12 +241,12 @@ impl<'a> MatcherPos<'a> {
 // Therefore, the initial MatcherPos is always allocated on the stack,
 // subsequent ones (of which there aren't that many) are allocated on the heap,
 // and this type is used to encapsulate both cases.
-enum MatcherPosHandle<'a> {
-    Ref(&'a mut MatcherPos<'a>),
-    Box(Box<MatcherPos<'a>>),
+enum MatcherPosHandle<'root, 'tt: 'root> {
+    Ref(&'root mut MatcherPos<'root, 'tt>),
+    Box(Box<MatcherPos<'root, 'tt>>),
 }
 
-impl<'a> Clone for MatcherPosHandle<'a> {
+impl<'root, 'tt> Clone for MatcherPosHandle<'root, 'tt> {
     // This always produces a new Box.
     fn clone(&self) -> Self {
         MatcherPosHandle::Box(match *self {
@@ -231,8 +256,8 @@ impl<'a> Clone for MatcherPosHandle<'a> {
     }
 }
 
-impl<'a> Deref for MatcherPosHandle<'a> {
-    type Target = MatcherPos<'a>;
+impl<'root, 'tt> Deref for MatcherPosHandle<'root, 'tt> {
+    type Target = MatcherPos<'root, 'tt>;
     fn deref(&self) -> &Self::Target {
         match *self {
             MatcherPosHandle::Ref(ref r) => r,
@@ -241,8 +266,8 @@ impl<'a> Deref for MatcherPosHandle<'a> {
     }
 }
 
-impl<'a> DerefMut for MatcherPosHandle<'a> {
-    fn deref_mut(&mut self) -> &mut MatcherPos<'a> {
+impl<'root, 'tt> DerefMut for MatcherPosHandle<'root, 'tt> {
+    fn deref_mut(&mut self) -> &mut MatcherPos<'root, 'tt> {
         match *self {
             MatcherPosHandle::Ref(ref mut r) => r,
             MatcherPosHandle::Box(ref mut b) => b,
@@ -278,14 +303,19 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
     })
 }
 
-/// Initialize `len` empty shared `Vec`s to be used to store matches of metavars.
-fn create_matches(len: usize) -> Vec<Rc<Vec<NamedMatch>>> {
-    (0..len).into_iter().map(|_| Rc::new(Vec::new())).collect()
+/// `len` `Vec`s (initially shared and empty) that will store matches of metavars.
+fn create_matches(len: usize) -> Box<[Rc<NamedMatchVec>]> {
+    if len == 0 {
+        vec![]
+    } else {
+        let empty_matches = Rc::new(SmallVec::new());
+        vec![empty_matches.clone(); len]
+    }.into_boxed_slice()
 }
 
 /// Generate the top-level matcher position in which the "dot" is before the first token of the
 /// matcher `ms` and we are going to start matching at the span `open` in the source.
-fn initial_matcher_pos(ms: &[TokenTree], open: Span) -> MatcherPos {
+fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherPos<'root, 'tt> {
     let match_idx_hi = count_names(ms);
     let matches = create_matches(match_idx_hi);
     MatcherPos {
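The new `create_matches` shares one empty `Rc` across all slots; `push_match` then un-shares a slot lazily via `Rc::make_mut`. A self-contained sketch of that trick (a plain `Vec<u32>` standing in for `NamedMatchVec`):

```rust
use std::rc::Rc;

// Every slot starts as a clone of one shared empty Rc: one allocation, not `len`.
fn create_matches(len: usize) -> Box<[Rc<Vec<u32>>]> {
    if len == 0 {
        vec![]
    } else {
        let empty_matches = Rc::new(Vec::new());
        vec![empty_matches.clone(); len]
    }
    .into_boxed_slice()
}

fn main() {
    let mut matches = create_matches(4);
    // All four slots point at the same empty vector.
    assert!(Rc::ptr_eq(&matches[0], &matches[1]));

    // Writing to one slot un-shares just that slot, as `push_match` does.
    Rc::make_mut(&mut matches[2]).push(7);
    assert!(!Rc::ptr_eq(&matches[1], &matches[2]));
    assert_eq!(matches[2].as_slice(), &[7]);
}
```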
@@ -305,7 +335,7 @@ fn initial_matcher_pos(ms: &[TokenTree], open: Span) -> MatcherPos {
         match_hi: match_idx_hi,
 
         // Haven't descended into any delimiters, so empty stack
-        stack: vec![],
+        stack: smallvec![],
 
         // Haven't descended into any sequences, so both of these are `None`.
         seq_op: None,
@@ -332,7 +362,7 @@ fn initial_matcher_pos(ms: &[TokenTree], open: Span) -> MatcherPos {
 /// token tree it was derived from.
 #[derive(Debug, Clone)]
 pub enum NamedMatch {
-    MatchedSeq(Rc<Vec<NamedMatch>>, DelimSpan),
+    MatchedSeq(Rc<NamedMatchVec>, DelimSpan),
     MatchedNonterminal(Rc<Nonterminal>),
 }
 
@@ -438,12 +468,12 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool {
 /// # Returns
 ///
 /// A `ParseResult`. Note that matches are kept track of through the items generated.
-fn inner_parse_loop<'a>(
+fn inner_parse_loop<'root, 'tt>(
     sess: &ParseSess,
-    cur_items: &mut SmallVec<[MatcherPosHandle<'a>; 1]>,
-    next_items: &mut Vec<MatcherPosHandle<'a>>,
-    eof_items: &mut SmallVec<[MatcherPosHandle<'a>; 1]>,
-    bb_items: &mut SmallVec<[MatcherPosHandle<'a>; 1]>,
+    cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
+    next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
+    eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
+    bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     token: &Token,
     span: syntax_pos::Span,
 ) -> ParseResult<()> {
@@ -540,14 +570,14 @@ fn inner_parse_loop<'a>(
                         new_item.match_cur += seq.num_captures;
                         new_item.idx += 1;
                         for idx in item.match_cur..item.match_cur + seq.num_captures {
-                            new_item.push_match(idx, MatchedSeq(Rc::new(vec![]), sp));
+                            new_item.push_match(idx, MatchedSeq(Rc::new(smallvec![]), sp));
                         }
                         cur_items.push(new_item);
                     }
 
                     let matches = create_matches(item.matches.len());
                     cur_items.push(MatcherPosHandle::Box(Box::new(MatcherPos {
-                        stack: vec![],
+                        stack: smallvec![],
                         sep: seq.separator.clone(),
                         seq_op: Some(seq.op),
                         idx: 0,
@@ -881,7 +911,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
                 FatalError.raise();
             }
         },
-        "pat" => token::NtPat(panictry!(p.parse_pat())),
+        "pat" => token::NtPat(panictry!(p.parse_pat(None))),
         "expr" => token::NtExpr(panictry!(p.parse_expr())),
         "literal" => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())),
         "ty" => token::NtTy(panictry!(p.parse_ty())),
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 87ade278c68..6bba891278a 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -50,7 +50,12 @@ pub struct ParserAnyMacro<'a> {
 impl<'a> ParserAnyMacro<'a> {
     pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
         let ParserAnyMacro { site_span, macro_ident, ref mut parser } = *self;
-        let fragment = panictry!(parser.parse_ast_fragment(kind, true));
+        let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| {
+            if e.span.is_dummy() {  // Get around lack of span in error (#30128)
+                e.set_span(site_span);
+            }
+            e
+        }));
 
         // We allow semicolons at the end of expressions -- e.g. the semicolon in
         // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`,
@@ -74,16 +79,19 @@ struct MacroRulesMacroExpander {
 }
 
 impl TTMacroExpander for MacroRulesMacroExpander {
-    fn expand<'cx>(&self,
-                   cx: &'cx mut ExtCtxt,
-                   sp: Span,
-                   input: TokenStream)
-                   -> Box<dyn MacResult+'cx> {
+    fn expand<'cx>(
+        &self,
+        cx: &'cx mut ExtCtxt,
+        sp: Span,
+        input: TokenStream,
+        def_span: Option<Span>,
+    ) -> Box<dyn MacResult+'cx> {
         if !self.valid {
             return DummyResult::any(sp);
         }
         generic_extension(cx,
                           sp,
+                          def_span,
                           self.name,
                           input,
                           &self.lhses,
@@ -99,6 +107,7 @@ fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
 /// Given `lhses` and `rhses`, this is the new macro we create
 fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                           sp: Span,
+                          def_span: Option<Span>,
                           name: ast::Ident,
                           arg: TokenStream,
                           lhses: &[quoted::TokenTree],
@@ -133,8 +142,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                 // Replace all the tokens for the corresponding positions in the macro, to maintain
                 // proper positions in error reporting, while maintaining the macro_backtrace.
                 if rhs_spans.len() == tts.len() {
-                    tts = tts.map_enumerated(|i, tt| {
-                        let mut tt = tt.clone();
+                    tts = tts.map_enumerated(|i, mut tt| {
                         let mut sp = rhs_spans[i];
                         sp = sp.with_ctxt(tt.span().ctxt());
                         tt.set_span(sp);
@@ -178,7 +186,14 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
     }
 
     let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers"));
-    let mut err = cx.struct_span_err(best_fail_spot.substitute_dummy(sp), &best_fail_msg);
+    let span = best_fail_spot.substitute_dummy(sp);
+    let mut err = cx.struct_span_err(span, &best_fail_msg);
+    err.span_label(span, best_fail_msg);
+    if let Some(sp) = def_span {
+        if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
+            err.span_label(cx.source_map().def_span(sp), "when calling this macro");
+        }
+    }
 
     // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
     if let Some((arg, comma_span)) = arg.add_comma() {
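A hedged example of a call that hits this path (labels approximated from the strings above):

```rust
// Fails to compile by design: no rule of `pair!` matches a single argument,
// and the error now labels both the call and the definition.
macro_rules! pair {
    ($a:expr, $b:expr) => { ($a, $b) }; // label: when calling this macro
}

fn main() {
    let _ = pair!(1); // error (approximately): unexpected end of macro invocation
}
```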
@@ -189,7 +204,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
             };
             match TokenTree::parse(cx, lhs_tt, arg.clone()) {
                 Success(_) => {
-                    if comma_span == DUMMY_SP {
+                    if comma_span.is_dummy() {
                         err.note("you might be missing a comma");
                     } else {
                         err.span_suggestion_short_with_applicability(
@@ -340,7 +355,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
             }
         }
 
-        let unstable_feature = attr::find_stability(&sess.span_diagnostic,
+        let unstable_feature = attr::find_stability(&sess,
                                                     &def.attrs, def.span).and_then(|stability| {
             if let attr::StabilityLevel::Unstable { issue, .. } = stability.level {
                 Some((stability.feature, issue))
@@ -792,15 +807,15 @@ fn check_matcher_core(sess: &ParseSess,
             if let TokenTree::MetaVarDecl(_, ref name, ref frag_spec) = *token {
                 for next_token in &suffix_first.tokens {
                     match is_in_follow(next_token, &frag_spec.as_str()) {
-                        Err((msg, help)) => {
+                        IsInFollow::Invalid(msg, help) => {
                             sess.span_diagnostic.struct_span_err(next_token.span(), &msg)
                                 .help(help).emit();
                             // don't bother reporting every source of
                             // conflict for a particular element of `last`.
                             continue 'each_last;
                         }
-                        Ok(true) => {}
-                        Ok(false) => {
+                        IsInFollow::Yes => {}
+                        IsInFollow::No(ref possible) => {
                             let may_be = if last.tokens.len() == 1 &&
                                 suffix_first.tokens.len() == 1
                             {
@@ -809,15 +824,41 @@ fn check_matcher_core(sess: &ParseSess,
                                 "may be"
                             };
 
-                            sess.span_diagnostic.span_err(
-                                next_token.span(),
+                            let sp = next_token.span();
+                            let mut err = sess.span_diagnostic.struct_span_err(
+                                sp,
                                 &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
                                           is not allowed for `{frag}` fragments",
                                          name=name,
                                          frag=frag_spec,
                                          next=quoted_tt_to_string(next_token),
-                                         may_be=may_be)
+                                         may_be=may_be),
                             );
+                            err.span_label(
+                                sp,
+                                format!("not allowed after `{}` fragments", frag_spec),
+                            );
+                            let msg = "allowed there are: ";
+                            match &possible[..] {
+                                &[] => {}
+                                &[t] => {
+                                    err.note(&format!(
+                                        "only {} is allowed after `{}` fragments",
+                                        t,
+                                        frag_spec,
+                                    ));
+                                }
+                                ts => {
+                                    err.note(&format!(
+                                        "{}{} or {}",
+                                        msg,
+                                        ts[..ts.len() - 1].iter().map(|s| *s)
+                                            .collect::<Vec<_>>().join(", "),
+                                        ts[ts.len() - 1],
+                                    ));
+                                }
+                            }
+                            err.emit();
                         }
                     }
                 }
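A hedged example of a definition this check rejects, showing the new note (text approximated from the strings above):

```rust
// Fails to compile by design: `+` is not in the FOLLOW set of `expr`
// fragments, and the note now lists the allowed tokens.
macro_rules! add_one {
    ($e:expr + one) => { $e + 1 };
    // error: `$e:expr` is followed by `+`, which is not allowed for `expr`
    // fragments; note: allowed there are: `=>`, `,` or `;`
}

fn main() {}
```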
@@ -860,6 +901,12 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool {
     }
 }
 
+enum IsInFollow {
+    Yes,
+    No(Vec<&'static str>),
+    Invalid(String, &'static str),
+}
+
 /// True if `frag` can legally be followed by the token `tok`. For
 /// fragments that can consume an unbounded number of tokens, `tok`
 /// must be within a well-defined follow set. This is intended to
@@ -868,80 +915,99 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool {
 /// break macros that were relying on that binary operator as a
 /// separator.
 // when changing this do not forget to update doc/book/macros.md!
-fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'static str)> {
+fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
     use self::quoted::TokenTree;
 
     if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
-        Ok(true)
+        IsInFollow::Yes
     } else {
         match frag {
             "item" => {
                 // since items *must* be followed by either a `;` or a `}`, we can
                 // accept anything after them
-                Ok(true)
+                IsInFollow::Yes
             },
             "block" => {
                 // anything can follow block, the braces provide an easy boundary to
                 // maintain
-                Ok(true)
+                IsInFollow::Yes
             },
-            "stmt" | "expr"  => match *tok {
-                TokenTree::Token(_, ref tok) => match *tok {
-                    FatArrow | Comma | Semi => Ok(true),
-                    _ => Ok(false)
-                },
-                _ => Ok(false),
+            "stmt" | "expr"  => {
+                let tokens = vec!["`=>`", "`,`", "`;`"];
+                match *tok {
+                    TokenTree::Token(_, ref tok) => match *tok {
+                        FatArrow | Comma | Semi => IsInFollow::Yes,
+                        _ => IsInFollow::No(tokens),
+                    },
+                    _ => IsInFollow::No(tokens),
+                }
             },
-            "pat" => match *tok {
-                TokenTree::Token(_, ref tok) => match *tok {
-                    FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
-                    Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true),
-                    _ => Ok(false)
-                },
-                _ => Ok(false),
+            "pat" => {
+                let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
+                match *tok {
+                    TokenTree::Token(_, ref tok) => match *tok {
+                        FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
+                        Ident(i, false) if i.name == "if" || i.name == "in" => IsInFollow::Yes,
+                        _ => IsInFollow::No(tokens),
+                    },
+                    _ => IsInFollow::No(tokens),
+                }
             },
-            "path" | "ty" => match *tok {
-                TokenTree::Token(_, ref tok) => match *tok {
-                    OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
-                    Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
-                    Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true),
-                    _ => Ok(false)
-                },
-                TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
-                _ => Ok(false),
+            "path" | "ty" => {
+                let tokens = vec![
+                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`>>`", "`=`", "`:`", "`;`",
+                    "`|`", "`as`", "`where`",
+                ];
+                match *tok {
+                    TokenTree::Token(_, ref tok) => match *tok {
+                        OpenDelim(token::DelimToken::Brace) |
+                        OpenDelim(token::DelimToken::Bracket) |
+                        Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
+                        BinOp(token::Or) => IsInFollow::Yes,
+                        Ident(i, false) if i.name == "as" || i.name == "where" => IsInFollow::Yes,
+                        _ => IsInFollow::No(tokens),
+                    },
+                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => IsInFollow::Yes,
+                    _ => IsInFollow::No(tokens),
+                }
             },
             "ident" | "lifetime" => {
                 // being a single token, idents and lifetimes are harmless
-                Ok(true)
+                IsInFollow::Yes
             },
             "literal" => {
                 // literals may be of a single token, or two tokens (negative numbers)
-                Ok(true)
+                IsInFollow::Yes
             },
             "meta" | "tt" => {
                 // being either a single token or a delimited sequence, tt is
                 // harmless
-                Ok(true)
+                IsInFollow::Yes
             },
             "vis" => {
                 // Explicitly disallow `priv`, on the off chance it comes back.
+                let tokens = vec!["`,`", "an ident", "a type"];
                 match *tok {
                     TokenTree::Token(_, ref tok) => match *tok {
-                        Comma => Ok(true),
-                        Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true),
-                        ref tok => Ok(tok.can_begin_type())
+                        Comma => IsInFollow::Yes,
+                        Ident(i, is_raw) if is_raw || i.name != "priv" => IsInFollow::Yes,
+                        ref tok => if tok.can_begin_type() {
+                            IsInFollow::Yes
+                        } else {
+                            IsInFollow::No(tokens)
+                        }
                     },
                     TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident"
                                                        || frag.name == "ty"
-                                                       || frag.name == "path" => Ok(true),
-                    _ => Ok(false)
+                                                       || frag.name == "path" => IsInFollow::Yes,
+                    _ => IsInFollow::No(tokens),
                 }
             },
-            "" => Ok(true), // keywords::Invalid
-            _ => Err((format!("invalid fragment specifier `{}`", frag),
-                     VALID_FRAGMENT_NAMES_MSG))
+            "" => IsInFollow::Yes, // keywords::Invalid
+            _ => IsInFollow::Invalid(format!("invalid fragment specifier `{}`", frag),
+                                     VALID_FRAGMENT_NAMES_MSG),
         }
     }
 }
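
The refactor above replaces `Result<bool, _>` with the three-way `IsInFollow`, so the caller can name the allowed follow tokens in its diagnostic. A minimal, self-contained sketch of the joining logic used for that note (function and variable names here are illustrative, not the compiler's):

    // Join a follow set the way the error note above does: all but the
    // last token separated by commas, the last one attached with "or".
    fn follow_note(msg: &str, ts: &[&str]) -> String {
        match ts {
            [] => msg.to_string(),
            [t] => format!("{}{}", msg, t),
            ts => format!(
                "{}{} or {}",
                msg,
                ts[..ts.len() - 1].join(", "),
                ts[ts.len() - 1],
            ),
        }
    }

    fn main() {
        let msg = "allowed there are: ";
        assert_eq!(
            follow_note(msg, &["`=>`", "`,`", "`;`"]),
            "allowed there are: `=>`, `,` or `;`"
        );
    }
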
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 74363f3e5f7..21848674831 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -496,7 +496,7 @@ where
                         return (None, KleeneOp::ZeroOrMore);
                     }
 
-                    // #2 is a Kleene op, which is the the only valid option
+                    // #2 is a Kleene op, which is the only valid option
                     Ok(Ok((op, _))) => {
                         // Warn that `?` as a separator will be deprecated
                         sess.buffer_lint(
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index ae486158fee..3d897d17e0b 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -219,9 +219,9 @@ impl Add for LockstepIterSize {
             LockstepIterSize::Unconstrained => other,
             LockstepIterSize::Contradiction(_) => self,
             LockstepIterSize::Constraint(l_len, ref l_id) => match other {
-                LockstepIterSize::Unconstrained => self.clone(),
+                LockstepIterSize::Unconstrained => self,
                 LockstepIterSize::Contradiction(_) => other,
-                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self.clone(),
+                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
                 LockstepIterSize::Constraint(r_len, r_id) => {
                     let msg = format!("inconsistent lockstep iteration: \
                                        '{}' has {} items, but '{}' has {}",
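
Dropping the `.clone()` calls works because the match consumes `self` by value and the arms that return it never move anything out of it. The surrounding `Add` impl is a small combining algebra: `Unconstrained` is the identity, equal constraints merge, and any mismatch collapses to `Contradiction`. A reduced, hedged model of that algebra, stripped of spans and metavariable names:

    // Reduced model of the lockstep-size algebra in transcribe.rs.
    #[derive(Debug, PartialEq)]
    enum Size {
        Unconstrained,
        Constraint(usize),
        Contradiction(String),
    }

    impl Size {
        fn add(self, other: Size) -> Size {
            match self {
                Size::Unconstrained => other,
                // The `_` pattern binds nothing, so `self` is still
                // whole and can be returned without a clone.
                Size::Contradiction(_) => self,
                Size::Constraint(l) => match other {
                    Size::Unconstrained => self,
                    Size::Contradiction(_) => other,
                    Size::Constraint(r) if l == r => self,
                    Size::Constraint(r) => Size::Contradiction(
                        format!("inconsistent lockstep iteration: {} vs {} items", l, r),
                    ),
                },
            }
        }
    }

    fn main() {
        let merged = Size::Unconstrained
            .add(Size::Constraint(3))
            .add(Size::Constraint(3));
        assert_eq!(merged, Size::Constraint(3));
    }
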
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index 84122688c83..55652c481bd 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -349,7 +349,7 @@ declare_features! (
     (active, abi_thiscall, "1.19.0", None, None),
 
     // Allows a test to fail without failing the whole suite
-    (active, allow_fail, "1.19.0", Some(42219), None),
+    (active, allow_fail, "1.19.0", Some(46488), None),
 
     // Allows unsized tuple coercion.
     (active, unsized_tuple_coercion, "1.20.0", Some(42877), None),
@@ -376,7 +376,7 @@ declare_features! (
     (active, non_exhaustive, "1.22.0", Some(44109), None),
 
     // `crate` as visibility modifier, synonymous to `pub(crate)`
-    (active, crate_visibility_modifier, "1.23.0", Some(45388), None),
+    (active, crate_visibility_modifier, "1.23.0", Some(53120), None),
 
     // extern types
     (active, extern_types, "1.23.0", Some(43467), None),
@@ -391,13 +391,13 @@ declare_features! (
     (active, generic_associated_types, "1.23.0", Some(44265), None),
 
     // `extern` in paths
-    (active, extern_in_paths, "1.23.0", Some(44660), None),
+    (active, extern_in_paths, "1.23.0", Some(55600), None),
 
     // Use `?` as the Kleene "at most one" operator
     (active, macro_at_most_once_rep, "1.25.0", Some(48075), None),
 
     // Infer static outlives requirements; RFC 2093
-    (active, infer_static_outlives_requirements, "1.26.0", Some(44493), None),
+    (active, infer_static_outlives_requirements, "1.26.0", Some(54185), None),
 
     // Multiple patterns with `|` in `if let` and `while let`
     (active, if_while_or_patterns, "1.26.0", Some(48215), None),
@@ -448,9 +448,6 @@ declare_features! (
     // Integer match exhaustiveness checking
     (active, exhaustive_integer_patterns, "1.30.0", Some(50907), None),
 
-    // RFC 2070: #[panic_implementation] / #[panic_handler]
-    (active, panic_implementation, "1.28.0", Some(44489), None),
-
     // #[doc(keyword = "...")]
     (active, doc_keyword, "1.28.0", Some(51315), None),
 
@@ -462,12 +459,11 @@ declare_features! (
 
     (active, abi_amdgpu_kernel, "1.29.0", Some(51575), None),
 
-    // impl<I:Iterator> Iterator for &mut Iterator
-    // impl Debug for Foo<'_>
-    (active, impl_header_lifetime_elision, "1.30.0", Some(15872), Some(Edition::Edition2018)),
+    // Perma-unstable; added for testing E0705
+    (active, test_2018_feature, "1.31.0", Some(0), Some(Edition::Edition2018)),
 
     // Support for arbitrary delimited token streams in non-macro attributes
-    (active, unrestricted_attribute_tokens, "1.30.0", Some(44690), None),
+    (active, unrestricted_attribute_tokens, "1.30.0", Some(55208), None),
 
     // Allows `use x::y;` to resolve through `self::x`, not just `::x`
     (active, uniform_paths, "1.30.0", Some(53130), None),
@@ -502,6 +498,12 @@ declare_features! (
 
     // Allows `const _: TYPE = VALUE`
     (active, underscore_const_names, "1.31.0", Some(54912), None),
+
+    // `extern crate foo as bar;` puts `bar` into the extern prelude.
+    (active, extern_crate_item_prelude, "1.31.0", Some(55599), None),
+
+    // `reason = ` in lint attributes and `expect` lint attribute
+    (active, lint_reasons, "1.31.0", Some(54503), None),
 );
 
 declare_features! (
@@ -536,6 +538,8 @@ declare_features! (
      Some("subsumed by `#![feature(proc_macro_hygiene)]`")),
     (removed, proc_macro_gen, "1.27.0", Some(54727), None,
      Some("subsumed by `#![feature(proc_macro_hygiene)]`")),
+    (removed, panic_implementation, "1.28.0", Some(44489), None,
+     Some("subsumed by `#[panic_handler]`")),
 );
 
 declare_features! (
@@ -684,6 +688,9 @@ declare_features! (
     (accepted, min_const_fn, "1.31.0", Some(53555), None),
     // Scoped lints
     (accepted, tool_lints, "1.31.0", Some(44690), None),
+    // impl<I:Iterator> Iterator for &mut Iterator
+    // impl Debug for Foo<'_>
+    (accepted, impl_header_lifetime_elision, "1.31.0", Some(15872), None),
 );
 
 // If you change this, please modify src/doc/unstable-book as well. You must
@@ -1152,16 +1159,6 @@ pub const BUILTIN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeG
                                    "infer 'static lifetime requirements",
                                    cfg_fn!(infer_static_outlives_requirements))),
 
-    // RFC 2070 (deprecated attribute name)
-    ("panic_implementation",
-     Normal,
-     Gated(Stability::Deprecated("https://github.com/rust-lang/rust/issues/44489\
-                                  #issuecomment-415140224",
-                                 Some("replace this attribute with `#[panic_handler]`")),
-           "panic_implementation",
-           "this attribute was renamed to `panic_handler`",
-           cfg_fn!(panic_implementation))),
-
     // RFC 2070
     ("panic_handler", Normal, Ungated),
 
@@ -1617,7 +1614,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             }
 
             ast::ItemKind::Struct(..) => {
-                if let Some(attr) = attr::find_by_name(&i.attrs[..], "repr") {
+                for attr in attr::filter_by_name(&i.attrs[..], "repr") {
                     for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
                         if item.check_name("simd") {
                             gate_feature_post!(&self, repr_simd, attr.span,
@@ -1627,19 +1624,13 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                             if name == "packed" {
                                 gate_feature_post!(&self, repr_packed, attr.span,
                                                    "the `#[repr(packed(n))]` attribute \
-                                                   is experimental");
+                                                    is experimental");
                             }
                         }
                     }
                 }
             }
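
The switch from `attr::find_by_name` to `attr::filter_by_name` means every `#[repr]` attribute on the struct is gate-checked, not just the first one found. A reduced model of the difference, with a stand-in `Attr` type rather than the real AST:

    #[derive(Debug)]
    struct Attr { name: &'static str, arg: &'static str }

    // Like attr::filter_by_name: yields every attribute with the given
    // name, where a find()-based scan would stop at the first.
    fn filter_by_name<'a>(attrs: &'a [Attr], name: &'static str)
                          -> impl Iterator<Item = &'a Attr> {
        attrs.iter().filter(move |a| a.name == name)
    }

    fn main() {
        let attrs = [
            Attr { name: "repr", arg: "simd" },
            Attr { name: "derive", arg: "Debug" },
            Attr { name: "repr", arg: "packed" },
        ];
        // Both `repr` attributes are visited by the gate check.
        let reprs: Vec<&Attr> = filter_by_name(&attrs, "repr").collect();
        assert_eq!(reprs.len(), 2);
        assert_eq!(reprs[1].arg, "packed");
    }
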
 
-            ast::ItemKind::TraitAlias(..) => {
-                gate_feature_post!(&self, trait_alias,
-                                   i.span,
-                                   "trait aliases are not yet fully implemented");
-            }
-
             ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
                 if polarity == ast::ImplPolarity::Negative {
                     gate_feature_post!(&self, optin_builtin_traits,
@@ -1661,6 +1652,15 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                                    "auto traits are experimental and possibly buggy");
             }
 
+            ast::ItemKind::TraitAlias(..) => {
+                gate_feature_post!(
+                    &self,
+                    trait_alias,
+                    i.span,
+                    "trait aliases are experimental"
+                );
+            }
+
             ast::ItemKind::MacroDef(ast::MacroDef { legacy: false, .. }) => {
                 let msg = "`macro` is experimental";
                 gate_feature_post!(&self, decl_macro, i.span, msg);
@@ -1930,7 +1930,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute],
     let incomplete_features = ["generic_associated_types"];
 
     let mut features = Features::new();
-    let mut edition_enabled_features = FxHashMap();
+    let mut edition_enabled_features = FxHashMap::default();
 
     for &edition in ALL_EDITIONS {
         if edition <= crate_edition {
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 95a2298ca75..0e6e2f90693 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -468,8 +468,9 @@ pub fn noop_fold_usize<T: Folder>(i: usize, _: &mut T) -> usize {
 
 pub fn noop_fold_path<T: Folder>(Path { segments, span }: Path, fld: &mut T) -> Path {
     Path {
-        segments: segments.move_map(|PathSegment { ident, args }| PathSegment {
+        segments: segments.move_map(|PathSegment { ident, id, args }| PathSegment {
             ident: fld.fold_ident(ident),
+            id: fld.new_id(id),
             args: args.map(|args| args.map(|args| fld.fold_generic_args(args))),
         }),
         span: fld.new_span(span)
@@ -944,7 +945,7 @@ pub fn noop_fold_item_kind<T: Folder>(i: ItemKind, folder: &mut T) -> ItemKind {
         ItemKind::Enum(enum_definition, generics) => {
             let generics = folder.fold_generics(generics);
             let variants = enum_definition.variants.move_map(|x| folder.fold_variant(x));
-            ItemKind::Enum(ast::EnumDef { variants: variants }, generics)
+            ItemKind::Enum(ast::EnumDef { variants }, generics)
         }
         ItemKind::Struct(struct_def, generics) => {
             let generics = folder.fold_generics(generics);
@@ -965,7 +966,7 @@ pub fn noop_fold_item_kind<T: Folder>(i: ItemKind, folder: &mut T) -> ItemKind {
             polarity,
             defaultness,
             folder.fold_generics(generics),
-            ifce.map(|trait_ref| folder.fold_trait_ref(trait_ref.clone())),
+            ifce.map(|trait_ref| folder.fold_trait_ref(trait_ref)),
             folder.fold_ty(ty),
             impl_items.move_flat_map(|item| folder.fold_impl_item(item)),
         ),
@@ -1234,6 +1235,7 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span, attrs}: Expr, folder: &mu
                 ExprKind::MethodCall(
                     PathSegment {
                         ident: folder.fold_ident(seg.ident),
+                        id: folder.new_id(seg.id),
                         args: seg.args.map(|args| {
                             args.map(|args| folder.fold_generic_args(args))
                         }),
diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs
index b0cf29e9f63..a32682967c7 100644
--- a/src/libsyntax/json.rs
+++ b/src/libsyntax/json.rs
@@ -36,19 +36,19 @@ use rustc_serialize::json::{as_json, as_pretty_json};
 pub struct JsonEmitter {
     dst: Box<dyn Write + Send>,
     registry: Option<Registry>,
-    cm: Lrc<dyn SourceMapper + sync::Send + sync::Sync>,
+    sm: Lrc<dyn SourceMapper + sync::Send + sync::Sync>,
     pretty: bool,
     ui_testing: bool,
 }
 
 impl JsonEmitter {
     pub fn stderr(registry: Option<Registry>,
-                  code_map: Lrc<SourceMap>,
+                  source_map: Lrc<SourceMap>,
                   pretty: bool) -> JsonEmitter {
         JsonEmitter {
             dst: Box::new(io::stderr()),
             registry,
-            cm: code_map,
+            sm: source_map,
             pretty,
             ui_testing: false,
         }
@@ -62,12 +62,12 @@ impl JsonEmitter {
 
     pub fn new(dst: Box<dyn Write + Send>,
                registry: Option<Registry>,
-               code_map: Lrc<SourceMap>,
+               source_map: Lrc<SourceMap>,
                pretty: bool) -> JsonEmitter {
         JsonEmitter {
             dst,
             registry,
-            cm: code_map,
+            sm: source_map,
             pretty,
             ui_testing: false,
         }
@@ -199,7 +199,7 @@ impl Diagnostic {
         }
         let buf = BufWriter::default();
         let output = buf.clone();
-        EmitterWriter::new(Box::new(buf), Some(je.cm.clone()), false, false)
+        EmitterWriter::new(Box::new(buf), Some(je.sm.clone()), false, false)
             .ui_testing(je.ui_testing).emit(db);
         let output = Arc::try_unwrap(output.0).unwrap().into_inner().unwrap();
         let output = String::from_utf8(output).unwrap();
@@ -269,8 +269,8 @@ impl DiagnosticSpan {
                       mut backtrace: vec::IntoIter<MacroBacktrace>,
                       je: &JsonEmitter)
                       -> DiagnosticSpan {
-        let start = je.cm.lookup_char_pos(span.lo());
-        let end = je.cm.lookup_char_pos(span.hi());
+        let start = je.sm.lookup_char_pos(span.lo());
+        let end = je.sm.lookup_char_pos(span.hi());
         let backtrace_step = backtrace.next().map(|bt| {
             let call_site =
                 Self::from_span_full(bt.call_site,
@@ -356,7 +356,7 @@ impl DiagnosticSpanLine {
     /// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
     /// `span` within the line.
     fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
-        je.cm.span_to_lines(span)
+        je.sm.span_to_lines(span)
              .map(|lines| {
                  let fm = &*lines.file;
                  lines.lines
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index 9077eca1821..e9a6535cba1 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -70,6 +70,23 @@ macro_rules! panictry {
     })
 }
 
+// A variant of `panictry!` that works on a `Vec<Diagnostic>` instead of one `DiagnosticBuilder`.
+macro_rules! panictry_buffer {
+    ($handler:expr, $e:expr) => ({
+        use std::result::Result::{Ok, Err};
+        use errors::{FatalError, DiagnosticBuilder};
+        match $e {
+            Ok(e) => e,
+            Err(errs) => {
+                for e in errs {
+                    DiagnosticBuilder::new_diagnostic($handler, e).emit();
+                }
+                FatalError.raise()
+            }
+        }
+    })
+}
+
 #[macro_export]
 macro_rules! unwrap_or {
     ($opt:expr, $default:expr) => {
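
A sketch of the control flow `panictry_buffer!` encodes: emit every buffered diagnostic, then abort, rather than unwrapping a single builder. The types below are stand-ins for the `errors` crate's `Handler` and `Diagnostic`, and `process::exit` stands in for `FatalError.raise()`:

    struct Diagnostic(String);
    struct Handler;

    impl Handler {
        fn emit(&self, d: &Diagnostic) {
            eprintln!("error: {}", d.0);
        }
    }

    // The shape panictry_buffer! expands to: emit each buffered
    // diagnostic through the handler, then abort.
    fn unwrap_or_abort<T>(handler: &Handler, res: Result<T, Vec<Diagnostic>>) -> T {
        match res {
            Ok(v) => v,
            Err(errs) => {
                for e in &errs {
                    handler.emit(e);
                }
                std::process::exit(101)
            }
        }
    }

    fn main() {
        let handler = Handler;
        let n: u32 = unwrap_or_abort(&handler, Ok(42));
        assert_eq!(n, 42);
    }
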
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index a814c88ee78..590506566dd 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -11,7 +11,7 @@
 use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
 use source_map::{SourceMap, FilePathMapping};
-use errors::{Applicability, FatalError, DiagnosticBuilder};
+use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
 use parse::{token, ParseSess};
 use str::char_at;
 use symbol::{Symbol, keywords};
@@ -175,6 +175,16 @@ impl<'a> StringReader<'a> {
         self.fatal_errs.clear();
     }
 
+    pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
+        let mut buffer = Vec::new();
+
+        for err in self.fatal_errs.drain(..) {
+            err.buffer(&mut buffer);
+        }
+
+        buffer
+    }
+
     pub fn peek(&self) -> TokenAndSpan {
         // FIXME(pcwalton): Bad copy!
         TokenAndSpan {
@@ -251,16 +261,27 @@ impl<'a> StringReader<'a> {
         Ok(sr)
     }
 
+    pub fn new_or_buffered_errs(sess: &'a ParseSess,
+                                source_file: Lrc<syntax_pos::SourceFile>,
+                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
+        let mut sr = StringReader::new_raw(sess, source_file, override_span);
+        if sr.advance_token().is_err() {
+            Err(sr.buffer_fatal_errors())
+        } else {
+            Ok(sr)
+        }
+    }
+
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());
 
         // Make the range zero-length if the span is invalid.
-        if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos {
+        if span.lo() > span.hi() || begin.sf.start_pos != end.sf.start_pos {
             span = span.shrink_to_lo();
         }
 
-        let mut sr = StringReader::new_raw_internal(sess, begin.fm, None);
+        let mut sr = StringReader::new_raw_internal(sess, begin.sf, None);
 
         // Seek the lexer to the right byte range.
         sr.next_pos = span.lo();
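
`buffer_fatal_errors` and `new_or_buffered_errs` let construction hand pending lexer errors back to the caller as plain data instead of emitting them on the spot. A reduced model of that drain-and-buffer pattern, with stand-in types rather than the real `StringReader` and `errors` crate:

    struct DiagnosticBuilder { msg: String }
    struct Diagnostic { msg: String }

    impl DiagnosticBuilder {
        // Like errors::DiagnosticBuilder::buffer: retire the in-flight
        // builder and push its payload onto the caller's buffer.
        fn buffer(self, buf: &mut Vec<Diagnostic>) {
            buf.push(Diagnostic { msg: self.msg });
        }
    }

    struct Reader { fatal_errs: Vec<DiagnosticBuilder> }

    impl Reader {
        fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
            let mut buffer = Vec::new();
            for err in self.fatal_errs.drain(..) {
                err.buffer(&mut buffer);
            }
            buffer
        }
    }

    fn main() {
        let mut r = Reader {
            fatal_errs: vec![DiagnosticBuilder { msg: "unterminated string".into() }],
        };
        let buffered = r.buffer_fatal_errors();
        assert!(r.fatal_errs.is_empty());
        assert_eq!(buffered[0].msg, "unterminated string");
    }
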
@@ -640,9 +661,9 @@ impl<'a> StringReader<'a> {
 
                 // I guess this is the only way to figure out if
                 // we're at the beginning of the file...
-                let cmap = SourceMap::new(FilePathMapping::empty());
-                cmap.files.borrow_mut().file_maps.push(self.source_file.clone());
-                let loc = cmap.lookup_char_pos_adj(self.pos);
+                let smap = SourceMap::new(FilePathMapping::empty());
+                smap.files.borrow_mut().source_files.push(self.source_file.clone());
+                let loc = smap.lookup_char_pos_adj(self.pos);
                 debug!("Skipping a shebang");
                 if loc.line == 1 && loc.col == CharPos(0) {
                     // FIXME: Add shebang "token", return it
@@ -1855,9 +1876,9 @@ mod tests {
     use rustc_data_structures::fx::FxHashSet;
     use rustc_data_structures::sync::Lock;
     use with_globals;
-    fn mk_sess(cm: Lrc<SourceMap>) -> ParseSess {
+    fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
         let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
-                                                          Some(cm.clone()),
+                                                          Some(sm.clone()),
                                                           false,
                                                           false);
         ParseSess {
@@ -1865,7 +1886,7 @@ mod tests {
             unstable_features: UnstableFeatures::from_environment(),
             config: CrateConfig::default(),
             included_mod_stack: Lock::new(Vec::new()),
-            code_map: cm,
+            source_map: sm,
             missing_fragment_specifiers: Lock::new(FxHashSet::default()),
             raw_identifier_spans: Lock::new(Vec::new()),
             registered_diagnostics: Lock::new(ErrorMap::new()),
@@ -1875,20 +1896,20 @@ mod tests {
     }
 
     // open a string reader for the given string
-    fn setup<'a>(cm: &SourceMap,
+    fn setup<'a>(sm: &SourceMap,
                  sess: &'a ParseSess,
                  teststr: String)
                  -> StringReader<'a> {
-        let fm = cm.new_source_file(PathBuf::from("zebra.rs").into(), teststr);
-        StringReader::new(sess, fm, None)
+        let sf = sm.new_source_file(PathBuf::from("zebra.rs").into(), teststr);
+        StringReader::new(sess, sf, None)
     }
 
     #[test]
     fn t1() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            let mut string_reader = setup(&cm,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            let mut string_reader = setup(&sm,
                                         &sh,
                                         "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                             .to_string());
@@ -1934,9 +1955,9 @@ mod tests {
     #[test]
     fn doublecolonparsing() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            check_tokenization(setup(&cm, &sh, "a b".to_string()),
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            check_tokenization(setup(&sm, &sh, "a b".to_string()),
                             vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
         })
     }
@@ -1944,9 +1965,9 @@ mod tests {
     #[test]
     fn dcparsing_2() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            check_tokenization(setup(&cm, &sh, "a::b".to_string()),
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            check_tokenization(setup(&sm, &sh, "a::b".to_string()),
                             vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
         })
     }
@@ -1954,9 +1975,9 @@ mod tests {
     #[test]
     fn dcparsing_3() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            check_tokenization(setup(&sm, &sh, "a ::b".to_string()),
                             vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
         })
     }
@@ -1964,9 +1985,9 @@ mod tests {
     #[test]
     fn dcparsing_4() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            check_tokenization(setup(&sm, &sh, "a:: b".to_string()),
                             vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
         })
     }
@@ -1974,9 +1995,9 @@ mod tests {
     #[test]
     fn character_a() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok,
                     token::Literal(token::Char(Symbol::intern("a")), None));
         })
     }
@@ -1984,9 +2005,9 @@ mod tests {
     #[test]
     fn character_space() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok,
                     token::Literal(token::Char(Symbol::intern(" ")), None));
         })
     }
@@ -1994,9 +2015,9 @@ mod tests {
     #[test]
     fn character_escaped() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok,
                     token::Literal(token::Char(Symbol::intern("\\n")), None));
         })
     }
@@ -2004,9 +2025,9 @@ mod tests {
     #[test]
     fn lifetime_name() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok,
                     token::Lifetime(Ident::from_str("'abc")));
         })
     }
@@ -2014,9 +2035,9 @@ mod tests {
     #[test]
     fn raw_string() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
                         .next_token()
                         .tok,
                     token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
@@ -2026,15 +2047,15 @@ mod tests {
     #[test]
     fn literal_suffixes() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
             macro_rules! test {
                 ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                    assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
+                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok,
                             token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
                                             Some(Symbol::intern("suffix"))));
                     // with a whitespace separator:
-                    assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
+                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok,
                             token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
                                             None));
                 }}
@@ -2050,13 +2071,13 @@ mod tests {
             test!("1.0", Float, "1.0");
             test!("1.0e10", Float, "1.0e10");
 
-            assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok,
                     token::Literal(token::Integer(Symbol::intern("2")),
                                     Some(Symbol::intern("us"))));
-            assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
                     token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
                                     Some(Symbol::intern("suffix"))));
-            assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
                     token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
                                     Some(Symbol::intern("suffix"))));
         })
@@ -2072,9 +2093,9 @@ mod tests {
     #[test]
     fn nested_block_comments() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
             match lexer.next_token().tok {
                 token::Comment => {}
                 _ => panic!("expected a comment!"),
@@ -2087,9 +2108,9 @@ mod tests {
     #[test]
     fn crlf_comments() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
             let comment = lexer.next_token();
             assert_eq!(comment.tok, token::Comment);
             assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 5c6d5816a47..fd66bf55a74 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -15,7 +15,7 @@ use ast::{self, CrateConfig, NodeId};
 use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{Handler, ColorConfig, DiagnosticBuilder};
+use errors::{Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
 use ptr::P;
@@ -57,7 +57,7 @@ pub struct ParseSess {
     pub non_modrs_mods: Lock<Vec<(ast::Ident, Span)>>,
     /// Used to determine and report recursive mod inclusions
     included_mod_stack: Lock<Vec<PathBuf>>,
-    code_map: Lrc<SourceMap>,
+    source_map: Lrc<SourceMap>,
     pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
 }
 
@@ -71,7 +71,7 @@ impl ParseSess {
         ParseSess::with_span_handler(handler, cm)
     }
 
-    pub fn with_span_handler(handler: Handler, code_map: Lrc<SourceMap>) -> ParseSess {
+    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> ParseSess {
         ParseSess {
             span_diagnostic: handler,
             unstable_features: UnstableFeatures::from_environment(),
@@ -80,14 +80,14 @@ impl ParseSess {
             raw_identifier_spans: Lock::new(Vec::new()),
             registered_diagnostics: Lock::new(ErrorMap::new()),
             included_mod_stack: Lock::new(vec![]),
-            code_map,
+            source_map,
             non_modrs_mods: Lock::new(vec![]),
             buffered_lints: Lock::new(vec![]),
         }
     }
 
     pub fn source_map(&self) -> &SourceMap {
-        &self.code_map
+        &self.source_map
     }
 
     pub fn buffer_lint<S: Into<MultiSpan>>(&self,
@@ -174,12 +174,21 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
-// Create a new parser from a source string
+/// Create a new parser from a source string.
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                       -> Parser {
-    let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
+    panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
+}
+
+/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// token stream.
+pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let mut parser = maybe_source_file_to_parser(sess,
+                                                 sess.source_map().new_source_file(name, source))?;
     parser.recurse_into_file_modules = false;
-    parser
+    Ok(parser)
 }
 
 /// Create a new parser, handling errors as appropriate
@@ -204,14 +213,23 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 
 /// Given a source_file and config, return a parser
 fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
+    panictry_buffer!(&sess.span_diagnostic,
+                     maybe_source_file_to_parser(sess, source_file))
+}
+
+/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
+/// initial token stream.
+fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
+    -> Result<Parser, Vec<Diagnostic>>
+{
     let end_pos = source_file.end_pos;
-    let mut parser = stream_to_parser(sess, source_file_to_stream(sess, source_file, None));
+    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
 
     if parser.token == token::Eof && parser.span.is_dummy() {
         parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
     }
 
-    parser
+    Ok(parser)
 }
 
 // must preserve old name for now, because quote! from the *existing*
@@ -230,7 +248,7 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     match sess.source_map().load_file(path) {
         Ok(source_file) => source_file,
         Err(e) => {
-            let msg = format!("couldn't read {:?}: {}", path.display(), e);
+            let msg = format!("couldn't read {}: {}", path.display(), e);
             match spanopt {
                 Some(sp) => sess.span_diagnostic.span_fatal(sp, &msg).raise(),
                 None => sess.span_diagnostic.fatal(&msg).raise()
@@ -243,9 +261,25 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 pub fn source_file_to_stream(sess: &ParseSess,
                              source_file: Lrc<SourceFile>,
                              override_span: Option<Span>) -> TokenStream {
-    let mut srdr = lexer::StringReader::new(sess, source_file, override_span);
+    panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
+}
+
+/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// parsing the token stream.
+pub fn maybe_file_to_stream(sess: &ParseSess,
+                            source_file: Lrc<SourceFile>,
+                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+    let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
     srdr.real_token();
-    panictry!(srdr.parse_all_token_trees())
+
+    match srdr.parse_all_token_trees() {
+        Ok(stream) => Ok(stream),
+        Err(err) => {
+            let mut buffer = Vec::with_capacity(1);
+            err.buffer(&mut buffer);
+            Err(buffer)
+        }
+    }
 }
 
 /// Given stream and the `ParseSess`, produce a parser
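
The `maybe_*` variants return lexing problems as `Err(Vec<Diagnostic>)`, while the old names keep their panicking behavior by wrapping them in `panictry_buffer!`. A hedged sketch of that two-layer API with stand-in types (not the real `ParseSess` or `Parser`; the null-byte check is a made-up failure, purely for illustration):

    #[derive(Debug)]
    struct Diagnostic(String);
    struct Parser;

    // Fallible entry point: lexer trouble comes back as buffered
    // diagnostics the caller can inspect or emit.
    fn maybe_new_parser(src: &str) -> Result<Parser, Vec<Diagnostic>> {
        if src.contains('\u{0}') {
            Err(vec![Diagnostic("made-up lexing error".to_string())])
        } else {
            Ok(Parser)
        }
    }

    // Panicking wrapper, layered on top the same way
    // new_parser_from_source_str wraps maybe_new_parser_from_source_str.
    fn new_parser(src: &str) -> Parser {
        match maybe_new_parser(src) {
            Ok(p) => p,
            Err(errs) => {
                for e in &errs {
                    eprintln!("error: {}", e.0);
                }
                panic!("fatal parse error");
            }
        }
    }

    fn main() {
        let _p = new_parser("fn main() {}");
        assert!(maybe_new_parser("\u{0}").is_err());
    }
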
@@ -494,8 +528,17 @@ fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
                  -> Option<ast::LitKind> {
     debug!("float_lit: {:?}, {:?}", s, suffix);
     // FIXME #2252: bounds checking float literals is deferred until trans
-    let s = s.chars().filter(|&c| c != '_').collect::<String>();
-    filtered_float_lit(Symbol::intern(&s), suffix, diag)
+
+    // Strip underscores without allocating a new String unless necessary.
+    let s2;
+    let s = if s.chars().any(|c| c == '_') {
+        s2 = s.chars().filter(|&c| c != '_').collect::<String>();
+        &s2
+    } else {
+        s
+    };
+
+    filtered_float_lit(Symbol::intern(s), suffix, diag)
 }
 
 /// Parse a string representing a byte literal into its final form. Similar to `char_lit`
@@ -591,8 +634,14 @@ fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
                    -> Option<ast::LitKind> {
     // s can only be ascii, byte indexing is fine
 
-    let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
-    let mut s = &s2[..];
+    // Strip underscores without allocating a new String unless necessary.
+    let s2;
+    let mut s = if s.chars().any(|c| c == '_') {
+        s2 = s.chars().filter(|&c| c != '_').collect::<String>();
+        &s2
+    } else {
+        s
+    };
 
     debug!("integer_lit: {}, {:?}", s, suffix);
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index c7089a295fc..d90ec4ea081 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -611,6 +611,7 @@ impl<'a> Parser<'a> {
             t if t.is_special_ident() => "reserved identifier",
             t if t.is_used_keyword() => "keyword",
             t if t.is_unused_keyword() => "reserved keyword",
+            token::DocComment(..) => "doc comment",
             _ => return None,
         })
     }
@@ -644,8 +645,8 @@ impl<'a> Parser<'a> {
                 Ok(())
             } else {
                 let token_str = pprust::token_to_string(t);
-                let this_token_str = self.this_token_to_string();
-                let mut err = self.fatal(&format!("expected `{}`, found `{}`",
+                let this_token_str = self.this_token_descr();
+                let mut err = self.fatal(&format!("expected `{}`, found {}",
                                                   token_str,
                                                   this_token_str));
 
@@ -1295,7 +1296,7 @@ impl<'a> Parser<'a> {
             self.check_keyword(keywords::Extern) && self.is_extern_non_path()
     }
 
-    /// parse a TyKind::BareFn type:
+    /// parse a `TyKind::BareFn` type:
     fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
         /*
 
@@ -1444,8 +1445,8 @@ impl<'a> Parser<'a> {
                             Some(body)
                         }
                         _ => {
-                            let token_str = self.this_token_to_string();
-                            let mut err = self.fatal(&format!("expected `;` or `{{`, found `{}`",
+                            let token_str = self.this_token_descr();
+                            let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
                                                               token_str));
                             err.span_label(self.span, "expected `;` or `{`");
                             return Err(err);
@@ -1453,8 +1454,8 @@ impl<'a> Parser<'a> {
                     }
                 }
                 _ => {
-                    let token_str = self.this_token_to_string();
-                    let mut err = self.fatal(&format!("expected `;` or `{{`, found `{}`",
+                    let token_str = self.this_token_descr();
+                    let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
                                                       token_str));
                     err.span_label(self.span, "expected `;` or `{`");
                     return Err(err);
@@ -1532,7 +1533,7 @@ impl<'a> Parser<'a> {
                             if maybe_bounds && bounds.len() == 1 && !trailing_plus => {
                         let path = match bounds[0] {
                             GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(),
-                            _ => self.bug("unexpected lifetime bound"),
+                            GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"),
                         };
                         self.parse_remaining_bounds(Vec::new(), path, lo, true)?
                     }
@@ -1789,6 +1790,35 @@ impl<'a> Parser<'a> {
         self.look_ahead(offset + 1, |t| t == &token::Colon)
     }
 
+    /// Skip unexpected attributes and doc comments in this position and emit an appropriate error.
+    fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
+        if let token::DocComment(_) = self.token {
+            let mut err = self.diagnostic().struct_span_err(
+                self.span,
+                &format!("documentation comments cannot be applied to {}", applied_to),
+            );
+            err.span_label(self.span, "doc comments are not allowed here");
+            err.emit();
+            self.bump();
+        } else if self.token == token::Pound && self.look_ahead(1, |t| {
+            *t == token::OpenDelim(token::Bracket)
+        }) {
+            let lo = self.span;
+            // Skip every token until the next possible argument.
+            while self.token != token::CloseDelim(token::Bracket) {
+                self.bump();
+            }
+            let sp = lo.to(self.span);
+            self.bump();
+            let mut err = self.diagnostic().struct_span_err(
+                sp,
+                &format!("attributes cannot be applied to {}", applied_to),
+            );
+            err.span_label(sp, "attributes are not allowed here");
+            err.emit();
+        }
+    }
+
     /// This version of parse arg doesn't necessarily require
     /// identifier names.
     fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> {
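
For the attribute case, the recovery above records a span from the `#` through the closing `]`, consuming everything in between. A reduced model of that skip loop over a toy token stream (it mirrors the hunk, including the assumption that a `]` exists and that brackets do not nest):

    #[derive(Clone, Copy, PartialEq)]
    enum Tok { Pound, OpenBracket, CloseBracket, Ident }

    // On `#`, consume tokens up to and including the closing `]`,
    // returning the next position and the span to report on.
    fn skip_attr(tokens: &[Tok], mut pos: usize) -> (usize, std::ops::Range<usize>) {
        let lo = pos;
        while tokens[pos] != Tok::CloseBracket {
            pos += 1;
        }
        pos += 1; // also consume the `]` itself
        (pos, lo..pos)
    }

    fn main() {
        // `#[cfg] x` as a toy token stream.
        let toks = [Tok::Pound, Tok::OpenBracket, Tok::Ident, Tok::CloseBracket, Tok::Ident];
        let (next, span) = skip_attr(&toks, 0);
        assert_eq!(next, 4);
        assert_eq!(span, 0..4);
    }
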
@@ -1797,7 +1827,8 @@ impl<'a> Parser<'a> {
         let (pat, ty) = if require_name || self.is_named_argument() {
             debug!("parse_arg_general parse_pat (require_name:{})",
                    require_name);
-            let pat = self.parse_pat()?;
+            self.eat_incorrect_doc_comment("method arguments");
+            let pat = self.parse_pat(Some("argument name"))?;
 
             if let Err(mut err) = self.expect(&token::Colon) {
                 // If we find a pattern followed by an identifier, it could be an (incorrect)
@@ -1819,10 +1850,12 @@ impl<'a> Parser<'a> {
                 return Err(err);
             }
 
+            self.eat_incorrect_doc_comment("a method argument's type");
             (pat, self.parse_ty()?)
         } else {
             debug!("parse_arg_general ident_to_pat");
             let parser_snapshot_before_ty = self.clone();
+            self.eat_incorrect_doc_comment("a method argument's type");
             let mut ty = self.parse_ty();
             if ty.is_ok() && self.token == token::Colon {
                 // This wasn't actually a type, but a pattern looking like a type,
@@ -1844,7 +1877,7 @@ impl<'a> Parser<'a> {
                     // Recover from attempting to parse the argument as a type without pattern.
                     err.cancel();
                     mem::replace(self, parser_snapshot_before_ty);
-                    let pat = self.parse_pat()?;
+                    let pat = self.parse_pat(Some("argument name"))?;
                     self.expect(&token::Colon)?;
                     let ty = self.parse_ty()?;
 
@@ -1882,7 +1915,7 @@ impl<'a> Parser<'a> {
 
     /// Parse an argument in a lambda header e.g. |arg, arg|
     fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
-        let pat = self.parse_pat()?;
+        let pat = self.parse_pat(Some("argument name"))?;
         let t = if self.eat(&token::Colon) {
             self.parse_ty()?
         } else {
@@ -1923,7 +1956,7 @@ impl<'a> Parser<'a> {
 
                 if suffix_illegal {
                     let sp = self.span;
-                    self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf)
+                    self.expect_no_suffix(sp, lit.literal_name(), suf)
                 }
 
                 result.unwrap()
@@ -1956,7 +1989,7 @@ impl<'a> Parser<'a> {
         let minus_lo = self.span;
         let minus_present = self.eat(&token::BinOp(token::Minus));
         let lo = self.span;
-        let literal = P(self.parse_lit()?);
+        let literal = self.parse_lit()?;
         let hi = self.prev_span;
         let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new());
 
@@ -2134,7 +2167,7 @@ impl<'a> Parser<'a> {
                 ParenthesisedArgs { inputs, output, span }.into()
             };
 
-            PathSegment { ident, args }
+            PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
         } else {
             // Generic arguments are not found.
             PathSegment::from_ident(ident)
@@ -2439,7 +2472,11 @@ impl<'a> Parser<'a> {
                     return Ok(self.mk_expr(lo.to(hi), ex, attrs));
                 }
                 if self.eat_keyword(keywords::Match) {
-                    return self.parse_match_expr(attrs);
+                    let match_sp = self.prev_span;
+                    return self.parse_match_expr(attrs).map_err(|mut err| {
+                        err.span_label(match_sp, "while parsing this match expression");
+                        err
+                    });
                 }
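
A miniature version of the `map_err` decoration added above, with a stand-in error type: the sub-parser's error gains a label pointing back at the `match` keyword before it propagates:

    #[derive(Debug)]
    struct ParseError { msg: String, labels: Vec<String> }

    fn parse_match_body() -> Result<(), ParseError> {
        Err(ParseError { msg: "expected `{`".into(), labels: vec![] })
    }

    fn parse_match() -> Result<(), ParseError> {
        // Decorate any error from the sub-parse with the outer context,
        // as the caller now does with span_label on the match span.
        parse_match_body().map_err(|mut err| {
            err.labels.push("while parsing this match expression".into());
            err
        })
    }

    fn main() {
        let err = parse_match().unwrap_err();
        assert!(err.msg.contains("expected"));
        assert_eq!(err.labels.len(), 1);
    }
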
                 if self.eat_keyword(keywords::Unsafe) {
                     return self.parse_block_expr(
@@ -3228,7 +3265,7 @@ impl<'a> Parser<'a> {
                         }));
 
                         let expr_str = self.sess.source_map().span_to_snippet(expr.span)
-                                                .unwrap_or(pprust::expr_to_string(&expr));
+                                                .unwrap_or_else(|_| pprust::expr_to_string(&expr));
                         err.span_suggestion_with_applicability(
                             expr.span,
                             &format!("try {} the cast value", op_verb),
@@ -3745,7 +3782,7 @@ impl<'a> Parser<'a> {
                                   "`..` can only be used once per tuple or tuple struct pattern");
                 }
             } else if !self.check(&token::CloseDelim(token::Paren)) {
-                fields.push(self.parse_pat()?);
+                fields.push(self.parse_pat(None)?);
             } else {
                 break
             }
@@ -3801,7 +3838,7 @@ impl<'a> Parser<'a> {
                 }
             }
 
-            let subpat = self.parse_pat()?;
+            let subpat = self.parse_pat(None)?;
             if before_slice && self.eat(&token::DotDot) {
                 slice = Some(subpat);
                 before_slice = false;
@@ -3826,7 +3863,7 @@ impl<'a> Parser<'a> {
             // Parsing a pattern of the form "fieldname: pat"
             let fieldname = self.parse_field_name()?;
             self.bump();
-            let pat = self.parse_pat()?;
+            let pat = self.parse_pat(None)?;
             hi = pat.span;
             (pat, fieldname, false)
         } else {
@@ -3917,8 +3954,8 @@ impl<'a> Parser<'a> {
                     etc_span = Some(etc_sp);
                     break;
                 }
-                let token_str = self.this_token_to_string();
-                let mut err = self.fatal(&format!("expected `}}`, found `{}`", token_str));
+                let token_str = self.this_token_descr();
+                let mut err = self.fatal(&format!("expected `}}`, found {}", token_str));
 
                 err.span_label(self.span, "expected `}`");
                 let mut comma_sp = None;
@@ -4028,7 +4065,7 @@ impl<'a> Parser<'a> {
     /// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast
     /// to subpatterns within such).
     fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
-        let pat = self.parse_pat()?;
+        let pat = self.parse_pat(None)?;
         if self.token == token::Comma {
             // An unexpected comma after a top-level pattern is a clue that the
             // user (perhaps more accustomed to some other language) forgot the
@@ -4060,13 +4097,17 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse a pattern.
-    pub fn parse_pat(&mut self) -> PResult<'a, P<Pat>> {
-        self.parse_pat_with_range_pat(true)
+    pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
+        self.parse_pat_with_range_pat(true, expected)
     }
 
     /// Parse a pattern, with a setting whether modern range patterns e.g. `a..=b`, `a..b` are
     /// allowed.
-    fn parse_pat_with_range_pat(&mut self, allow_range_pat: bool) -> PResult<'a, P<Pat>> {
+    fn parse_pat_with_range_pat(
+        &mut self,
+        allow_range_pat: bool,
+        expected: Option<&'static str>,
+    ) -> PResult<'a, P<Pat>> {
         maybe_whole!(self, NtPat, |x| x);
 
         let lo = self.span;
@@ -4082,7 +4123,7 @@ impl<'a> Parser<'a> {
                     err.span_label(self.span, "unexpected lifetime");
                     return Err(err);
                 }
-                let subpat = self.parse_pat_with_range_pat(false)?;
+                let subpat = self.parse_pat_with_range_pat(false, expected)?;
                 pat = PatKind::Ref(subpat, mutbl);
             }
             token::OpenDelim(token::Paren) => {
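
Threading `expected: Option<&'static str>` through `parse_pat` lets each call site name what it was looking for ("argument name", "binding pattern"), so the eventual "expected X, found Y" diagnostic is specific. A reduced sketch of that message selection, outside the real parser:

    // Fall back to "pattern" when the caller gave no more specific
    // expectation, as parse_pat_with_range_pat does above.
    fn expected_msg(expected: Option<&'static str>, found: &str) -> String {
        let expected = expected.unwrap_or("pattern");
        format!("expected {}, found {}", expected, found)
    }

    fn main() {
        assert_eq!(
            expected_msg(Some("argument name"), "doc comment"),
            "expected argument name, found doc comment"
        );
        assert_eq!(expected_msg(None, "`;`"), "expected pattern, found `;`");
    }
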
@@ -4128,7 +4169,7 @@ impl<'a> Parser<'a> {
                 pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
             } else if self.eat_keyword(keywords::Box) {
                 // Parse box pat
-                let subpat = self.parse_pat_with_range_pat(false)?;
+                let subpat = self.parse_pat_with_range_pat(false, None)?;
                 pat = PatKind::Box(subpat);
             } else if self.token.is_ident() && !self.token.is_reserved_ident() &&
                       self.parse_as_ident() {
@@ -4228,9 +4269,14 @@ impl<'a> Parser<'a> {
                     }
                     Err(mut err) => {
                         self.cancel(&mut err);
-                        let msg = format!("expected pattern, found {}", self.this_token_descr());
+                        let expected = expected.unwrap_or("pattern");
+                        let msg = format!(
+                            "expected {}, found {}",
+                            expected,
+                            self.this_token_descr(),
+                        );
                         let mut err = self.fatal(&msg);
-                        err.span_label(self.span, "expected pattern");
+                        err.span_label(self.span, format!("expected {}", expected));
                         return Err(err);
                     }
                 }
@@ -4274,7 +4320,7 @@ impl<'a> Parser<'a> {
                        -> PResult<'a, PatKind> {
         let ident = self.parse_ident()?;
         let sub = if self.eat(&token::At) {
-            Some(self.parse_pat()?)
+            Some(self.parse_pat(Some("binding pattern"))?)
         } else {
             None
         };
@@ -4680,8 +4726,8 @@ impl<'a> Parser<'a> {
                     } else {
                         ""
                     };
-                    let tok_str = self.this_token_to_string();
-                    let mut err = self.fatal(&format!("expected {}`(` or `{{`, found `{}`",
+                    let tok_str = self.this_token_descr();
+                    let mut err = self.fatal(&format!("expected {}`(` or `{{`, found {}",
                                                       ident_str,
                                                       tok_str));
                     err.span_label(self.span, format!("expected {}`(` or `{{`", ident_str));
@@ -4817,8 +4863,8 @@ impl<'a> Parser<'a> {
 
         if !self.eat(&token::OpenDelim(token::Brace)) {
             let sp = self.span;
-            let tok = self.this_token_to_string();
-            let mut e = self.span_fatal(sp, &format!("expected `{{`, found `{}`", tok));
+            let tok = self.this_token_descr();
+            let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
             let do_not_suggest_help =
                 self.token.is_keyword(keywords::In) || self.token == token::Colon;
 
@@ -4880,6 +4926,7 @@ impl<'a> Parser<'a> {
                 }
                 _ => ()
             }
+            e.span_label(sp, "expected `{`");
             return Err(e);
         }
 
@@ -4975,7 +5022,7 @@ impl<'a> Parser<'a> {
 
     fn warn_missing_semicolon(&self) {
         self.diagnostic().struct_span_warn(self.span, {
-            &format!("expected `;`, found `{}`", self.this_token_to_string())
+            &format!("expected `;`, found {}", self.this_token_descr())
         }).note({
             "This was erroneously allowed and will become a hard error in a future release"
         }).emit();
@@ -5779,7 +5826,7 @@ impl<'a> Parser<'a> {
                              ast::ImplItemKind)> {
         // code copied from parse_macro_use_or_failure... abstraction!
         if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
-            // Method macro.
+            // method macro
             Ok((keywords::Invalid.ident(), vec![], ast::Generics::default(),
                 ast::ImplItemKind::Macro(mac)))
         } else {
@@ -6014,9 +6061,9 @@ impl<'a> Parser<'a> {
             self.expect(&token::Semi)?;
             body
         } else {
-            let token_str = self.this_token_to_string();
+            let token_str = self.this_token_descr();
             let mut err = self.fatal(&format!(
-                "expected `where`, `{{`, `(`, or `;` after struct name, found `{}`",
+                "expected `where`, `{{`, `(`, or `;` after struct name, found {}",
                 token_str
             ));
             err.span_label(self.span, "expected `where`, `{`, `(`, or `;` after struct name");
@@ -6038,9 +6085,9 @@ impl<'a> Parser<'a> {
         } else if self.token == token::OpenDelim(token::Brace) {
             VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
         } else {
-            let token_str = self.this_token_to_string();
+            let token_str = self.this_token_descr();
             let mut err = self.fatal(&format!(
-                "expected `where` or `{{` after union name, found `{}`", token_str));
+                "expected `where` or `{{` after union name, found {}", token_str));
             err.span_label(self.span, "expected `where` or `{` after union name");
             return Err(err);
         };
@@ -6088,9 +6135,9 @@ impl<'a> Parser<'a> {
             }
             self.eat(&token::CloseDelim(token::Brace));
         } else {
-            let token_str = self.this_token_to_string();
+            let token_str = self.this_token_descr();
             let mut err = self.fatal(&format!(
-                    "expected `where`, or `{{` after struct name, found `{}`", token_str));
+                    "expected `where`, or `{{` after struct name, found {}", token_str));
             err.span_label(self.span, "expected `where`, or `{` after struct name");
             return Err(err);
         }
@@ -6166,8 +6213,8 @@ impl<'a> Parser<'a> {
             }
             _ => {
                 let sp = self.sess.source_map().next_point(self.prev_span);
-                let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found `{}`",
-                                                                self.this_token_to_string()));
+                let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found {}",
+                                                                self.this_token_descr()));
                 if self.token.is_ident() {
                     // This is likely another field; emit the diagnostic and keep going
                     err.span_suggestion_with_applicability(
@@ -6303,9 +6350,9 @@ impl<'a> Parser<'a> {
         }
 
         if !self.eat(term) {
-            let token_str = self.this_token_to_string();
-            let mut err = self.fatal(&format!("expected item, found `{}`", token_str));
-            if token_str == ";" {
+            let token_str = self.this_token_descr();
+            let mut err = self.fatal(&format!("expected item, found {}", token_str));
+            if self.token == token::Semi {
                 let msg = "consider removing this semicolon";
                 err.span_suggestion_short_with_applicability(
                     self.span, msg, String::new(), Applicability::MachineApplicable
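
Note the accompanying change from `token_str == ";"` to `self.token == token::Semi`: once `token_str` holds the descriptive rendering rather than the bare text, string comparison against `";"` would no longer match, so the check moves to the token itself. A tiny sketch of the mismatch (the rendered form is an assumption):

    fn main() {
        let descr = "`;`";       // assumed shape of the descriptive rendering
        assert_ne!(descr, ";");  // the old string comparison would now fail
    }
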
@@ -6426,6 +6473,17 @@ impl<'a> Parser<'a> {
             self.directory.path.to_mut().push(&path.as_str());
             self.directory.ownership = DirectoryOwnership::Owned { relative: None };
         } else {
+            // For relative paths, we must first push the current module's own
+            // name (the relative offset) onto the directory path, so that any
+            // additional segments from an inline `mod x { ... }` come after it.
+            //
+            // For example, a `mod z { ... }` inside `x/y.rs` should set the current
+            // directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`.
+            if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership {
+                if let Some(ident) = relative.take() { // remove the relative offset
+                    self.directory.path.to_mut().push(ident.as_str());
+                }
+            }
             self.directory.path.to_mut().push(&id.as_str());
         }
     }
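
The ordering matters: the relative offset must be consumed and pushed before the inline module's own name. A self-contained sketch of the take-then-push sequence using std paths, with the values from the comment above:

    use std::path::PathBuf;

    fn main() {
        // Parsing x/y.rs: directory path is "x" with relative offset "y".
        let mut relative = Some("y");
        let mut path = PathBuf::from("x");

        // Seeing an inline `mod z { ... }`: consume the offset first...
        if let Some(seg) = relative.take() {
            path.push(seg); // path = "x/y"
        }
        // ...then extend with the module name.
        path.push("z");
        assert_eq!(path, PathBuf::from("x/y/z")); // not "x/z"
    }
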
@@ -6781,11 +6839,11 @@ impl<'a> Parser<'a> {
         Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
     }
 
-    /// Parse type Foo = Bar;
+    /// Parse `type Foo = Bar;`
     /// or
-    /// existential type Foo: Bar;
+    /// `existential type Foo: Bar;`
     /// or
-    /// return None without modifying the parser state
+    /// `return None` without modifying the parser state
     fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
         // This parses the grammar:
         //     Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
@@ -6871,7 +6929,7 @@ impl<'a> Parser<'a> {
             _ => ()
         }
 
-        Ok(ast::EnumDef { variants: variants })
+        Ok(ast::EnumDef { variants })
     }
 
     /// Parse an "enum" declaration
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 01bc7f6ad30..1c6fc1ac185 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -79,14 +79,14 @@ pub enum Lit {
 }
 
 impl Lit {
-    crate fn short_name(&self) -> &'static str {
+    crate fn literal_name(&self) -> &'static str {
         match *self {
-            Byte(_) => "byte",
-            Char(_) => "char",
-            Integer(_) => "integer",
-            Float(_) => "float",
-            Str_(_) | StrRaw(..) => "string",
-            ByteStr(_) | ByteStrRaw(..) => "byte string"
+            Byte(_) => "byte literal",
+            Char(_) => "char literal",
+            Integer(_) => "integer literal",
+            Float(_) => "float literal",
+            Str_(_) | StrRaw(..) => "string literal",
+            ByteStr(_) | ByteStrRaw(..) => "byte string literal"
         }
     }
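
The rename folds the word "literal" into the returned name, so diagnostics that print it no longer append it themselves. A standalone mirror of the new mapping on a stripped-down enum:

    enum Lit { Char, Integer }

    impl Lit {
        fn literal_name(&self) -> &'static str {
            match self {
                Lit::Char => "char literal",
                Lit::Integer => "integer literal",
            }
        }
    }

    fn main() {
        // Messages read naturally without an extra "literal" suffix.
        assert_eq!(format!("unexpected {}", Lit::Char.literal_name()),
                   "unexpected char literal");
    }
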
 
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 83a05921510..ce7708cc42e 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -3119,7 +3119,7 @@ impl<'a> State<'a> {
             if cmnt.style != comments::Trailing { return Ok(()) }
             let span_line = cm.lookup_char_pos(span.hi());
             let comment_line = cm.lookup_char_pos(cmnt.pos);
-            let next = next_pos.unwrap_or(cmnt.pos + BytePos(1));
+            let next = next_pos.unwrap_or_else(|| cmnt.pos + BytePos(1));
             if span.hi() < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line {
                 self.print_comment(cmnt)?;
             }
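
`unwrap_or` evaluates its argument eagerly even when the value is `Some`; `unwrap_or_else` defers the computation to a closure that only runs on `None`. The cost here is a single addition, but the lazy form is the idiomatic default. A sketch:

    fn main() {
        let next_pos: Option<u32> = None;
        let cmnt_pos = 41;

        // The closure runs only because next_pos is None.
        let next = next_pos.unwrap_or_else(|| cmnt_pos + 1);
        assert_eq!(next, 42);

        // With Some, the closure is never evaluated.
        let next = Some(7).unwrap_or_else(|| cmnt_pos + 1);
        assert_eq!(next, 7);
    }
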
diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs
index 7ee58350621..e8cacc3b5af 100644
--- a/src/libsyntax/source_map.rs
+++ b/src/libsyntax/source_map.rs
@@ -106,17 +106,17 @@ impl FileLoader for RealFileLoader {
 // subsequent compilation sessions (which is something we need to do during
 // incremental compilation).
 #[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
-pub struct StableFilemapId(u128);
+pub struct StableSourceFileId(u128);
 
-impl StableFilemapId {
-    pub fn new(source_file: &SourceFile) -> StableFilemapId {
+impl StableSourceFileId {
+    pub fn new(source_file: &SourceFile) -> StableSourceFileId {
         let mut hasher = StableHasher::new();
 
         source_file.name.hash(&mut hasher);
         source_file.name_was_remapped.hash(&mut hasher);
         source_file.unmapped_path.hash(&mut hasher);
 
-        StableFilemapId(hasher.finish())
+        StableSourceFileId(hasher.finish())
     }
 }
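
The identity is derived from the file's name and path metadata rather than its position in the file list, which is what makes it stable across compilation sessions. A rough standalone sketch of the idea using std's hasher (the real code uses rustc's StableHasher, which is deterministic across processes; DefaultHasher is not):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    struct StableSourceFileId(u64);

    fn stable_id(name: &str, name_was_remapped: bool) -> StableSourceFileId {
        let mut hasher = DefaultHasher::new();
        name.hash(&mut hasher);
        name_was_remapped.hash(&mut hasher);
        StableSourceFileId(hasher.finish())
    }

    fn main() {
        // Same inputs, same id -- independent of insertion order.
        assert_eq!(stable_id("blork.rs", false), stable_id("blork.rs", false));
        assert_ne!(stable_id("blork.rs", false), stable_id("blork2.rs", false));
    }
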
 
@@ -124,9 +124,10 @@ impl StableFilemapId {
 // SourceMap
 //
 
+#[derive(Default)]
 pub(super) struct SourceMapFiles {
-    pub(super) file_maps: Vec<Lrc<SourceFile>>,
-    stable_id_to_source_file: FxHashMap<StableFilemapId, Lrc<SourceFile>>
+    pub(super) source_files: Vec<Lrc<SourceFile>>,
+    stable_id_to_source_file: FxHashMap<StableSourceFileId, Lrc<SourceFile>>
 }
 
 pub struct SourceMap {
@@ -143,10 +144,7 @@ pub struct SourceMap {
 impl SourceMap {
     pub fn new(path_mapping: FilePathMapping) -> SourceMap {
         SourceMap {
-            files: Lock::new(SourceMapFiles {
-                file_maps: Vec::new(),
-                stable_id_to_source_file: FxHashMap(),
-            }),
+            files: Default::default(),
             file_loader: Box::new(RealFileLoader),
             path_mapping,
             doctest_offset: None,
@@ -166,10 +164,7 @@ impl SourceMap {
                             path_mapping: FilePathMapping)
                             -> SourceMap {
         SourceMap {
-            files: Lock::new(SourceMapFiles {
-                file_maps: Vec::new(),
-                stable_id_to_source_file: FxHashMap(),
-            }),
+            files: Default::default(),
             file_loader: file_loader,
             path_mapping,
             doctest_offset: None,
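
Deriving `Default` on `SourceMapFiles` is what lets both constructors collapse the hand-written initializer into `files: Default::default()`; it works because every field type (`Vec`, `FxHashMap`) implements `Default` itself. A sketch with std types standing in:

    use std::collections::HashMap;

    // Stand-in for SourceMapFiles with std collections.
    #[derive(Default)]
    struct Files {
        source_files: Vec<String>,
        by_stable_id: HashMap<u64, String>,
    }

    fn main() {
        let files: Files = Default::default();
        assert!(files.source_files.is_empty());
        assert!(files.by_stable_id.is_empty());
    }
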
@@ -195,15 +190,16 @@ impl SourceMap {
     }
 
     pub fn files(&self) -> MappedLockGuard<Vec<Lrc<SourceFile>>> {
-        LockGuard::map(self.files.borrow(), |files| &mut files.file_maps)
+        LockGuard::map(self.files.borrow(), |files| &mut files.source_files)
     }
 
-    pub fn source_file_by_stable_id(&self, stable_id: StableFilemapId) -> Option<Lrc<SourceFile>> {
-        self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|fm| fm.clone())
+    pub fn source_file_by_stable_id(&self, stable_id: StableSourceFileId)
+                                    -> Option<Lrc<SourceFile>> {
+        self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|sf| sf.clone())
     }
 
     fn next_start_pos(&self) -> usize {
-        match self.files.borrow().file_maps.last() {
+        match self.files.borrow().source_files.last() {
             None => 0,
             // Add one so there is some space between files. This lets us distinguish
             // positions in the source_map, even in the presence of zero-length files.
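
The one-byte gap gives every file a distinct position range even when it is empty; the test data below bears this out ("first line.\nsecond line" is 23 bytes, so empty.rs starts at BytePos(24) and blork2.rs at BytePos(25)). A sketch of the allocation rule:

    // Start position for a new file, given the previous file's end position.
    fn next_start_pos(last_end: Option<usize>) -> usize {
        last_end.map_or(0, |end| end + 1) // +1 keeps files distinguishable
    }

    fn main() {
        assert_eq!(next_start_pos(None), 0);      // blork.rs: 0..=23
        assert_eq!(next_start_pos(Some(23)), 24); // empty.rs: 24..=24
        assert_eq!(next_start_pos(Some(24)), 25); // blork2.rs: 25..
    }
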
@@ -240,8 +236,8 @@ impl SourceMap {
 
         let mut files = self.files.borrow_mut();
 
-        files.file_maps.push(source_file.clone());
-        files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file),
+        files.source_files.push(source_file.clone());
+        files.stable_id_to_source_file.insert(StableSourceFileId::new(&source_file),
                                               source_file.clone());
 
         source_file
@@ -298,8 +294,8 @@ impl SourceMap {
 
         let mut files = self.files.borrow_mut();
 
-        files.file_maps.push(source_file.clone());
-        files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file),
+        files.source_files.push(source_file.clone());
+        files.stable_id_to_source_file.insert(StableSourceFileId::new(&source_file),
                                               source_file.clone());
 
         source_file
@@ -329,7 +325,7 @@ impl SourceMap {
     pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
         let chpos = self.bytepos_to_file_charpos(pos);
         match self.lookup_line(pos) {
-            Ok(SourceFileAndLine { fm: f, line: a }) => {
+            Ok(SourceFileAndLine { sf: f, line: a }) => {
                 let line = a + 1; // Line numbers start at 1
                 let linebpos = f.lines[a];
                 let linechpos = self.bytepos_to_file_charpos(linebpos);
@@ -392,10 +388,10 @@ impl SourceMap {
     pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
         let idx = self.lookup_source_file_idx(pos);
 
-        let f = (*self.files.borrow().file_maps)[idx].clone();
+        let f = (*self.files.borrow().source_files)[idx].clone();
 
         match f.lookup_line(pos) {
-            Some(line) => Ok(SourceFileAndLine { fm: f, line: line }),
+            Some(line) => Ok(SourceFileAndLine { sf: f, line: line }),
             None => Err(f)
         }
     }
@@ -446,7 +442,7 @@ impl SourceMap {
     }
 
     pub fn span_to_string(&self, sp: Span) -> String {
-        if self.files.borrow().file_maps.is_empty() && sp.is_dummy() {
+        if self.files.borrow().source_files.is_empty() && sp.is_dummy() {
             return "no-location".to_string();
         }
 
@@ -536,38 +532,38 @@ impl SourceMap {
         let local_begin = self.lookup_byte_offset(sp.lo());
         let local_end = self.lookup_byte_offset(sp.hi());
 
-        if local_begin.fm.start_pos != local_end.fm.start_pos {
+        if local_begin.sf.start_pos != local_end.sf.start_pos {
             return Err(SpanSnippetError::DistinctSources(DistinctSources {
-                begin: (local_begin.fm.name.clone(),
-                        local_begin.fm.start_pos),
-                end: (local_end.fm.name.clone(),
-                      local_end.fm.start_pos)
+                begin: (local_begin.sf.name.clone(),
+                        local_begin.sf.start_pos),
+                end: (local_end.sf.name.clone(),
+                      local_end.sf.start_pos)
             }));
         } else {
-            self.ensure_source_file_source_present(local_begin.fm.clone());
+            self.ensure_source_file_source_present(local_begin.sf.clone());
 
             let start_index = local_begin.pos.to_usize();
             let end_index = local_end.pos.to_usize();
-            let source_len = (local_begin.fm.end_pos -
-                              local_begin.fm.start_pos).to_usize();
+            let source_len = (local_begin.sf.end_pos -
+                              local_begin.sf.start_pos).to_usize();
 
             if start_index > end_index || end_index > source_len {
-                return Err(SpanSnippetError::MalformedForCodemap(
-                    MalformedCodemapPositions {
-                        name: local_begin.fm.name.clone(),
+                return Err(SpanSnippetError::MalformedForSourcemap(
+                    MalformedSourceMapPositions {
+                        name: local_begin.sf.name.clone(),
                         source_len,
                         begin_pos: local_begin.pos,
                         end_pos: local_end.pos,
                     }));
             }
 
-            if let Some(ref src) = local_begin.fm.src {
+            if let Some(ref src) = local_begin.sf.src {
                 return Ok(extract_source(src, start_index, end_index));
-            } else if let Some(src) = local_begin.fm.external_src.borrow().get_source() {
+            } else if let Some(src) = local_begin.sf.external_src.borrow().get_source() {
                 return Ok(extract_source(src, start_index, end_index));
             } else {
                 return Err(SpanSnippetError::SourceNotAvailable {
-                    filename: local_begin.fm.name.clone()
+                    filename: local_begin.sf.name.clone()
                 });
             }
         }
@@ -762,7 +758,7 @@ impl SourceMap {
             return 1;
         }
 
-        let source_len = (local_begin.fm.end_pos - local_begin.fm.start_pos).to_usize();
+        let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
         debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len);
         // Ensure indexes are also not malformed.
         if start_index > end_index || end_index > source_len {
@@ -770,11 +766,11 @@ impl SourceMap {
             return 1;
         }
 
-        let src = local_begin.fm.external_src.borrow();
+        let src = local_begin.sf.external_src.borrow();
 
         // We need to extend the snippet to the end of the src rather than to end_index so when
         // searching forwards for boundaries we've got somewhere to search.
-        let snippet = if let Some(ref src) = local_begin.fm.src {
+        let snippet = if let Some(ref src) = local_begin.sf.src {
             let len = src.len();
             (&src[start_index..len])
         } else if let Some(src) = src.get_source() {
@@ -811,9 +807,9 @@ impl SourceMap {
     }
 
     pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
-        for fm in self.files.borrow().file_maps.iter() {
-            if *filename == fm.name {
-                return Some(fm.clone());
+        for sf in self.files.borrow().source_files.iter() {
+            if *filename == sf.name {
+                return Some(sf.clone());
             }
         }
         None
@@ -822,15 +818,15 @@ impl SourceMap {
     /// For a global BytePos compute the local offset within the containing SourceFile
     pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
         let idx = self.lookup_source_file_idx(bpos);
-        let fm = (*self.files.borrow().file_maps)[idx].clone();
-        let offset = bpos - fm.start_pos;
-        SourceFileAndBytePos {fm: fm, pos: offset}
+        let sf = (*self.files.borrow().source_files)[idx].clone();
+        let offset = bpos - sf.start_pos;
+        SourceFileAndBytePos { sf, pos: offset }
     }
 
     /// Converts an absolute BytePos to a CharPos relative to the source_file.
     pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
         let idx = self.lookup_source_file_idx(bpos);
-        let map = &(*self.files.borrow().file_maps)[idx];
+        let map = &(*self.files.borrow().source_files)[idx];
 
         // The number of extra bytes due to multibyte chars in the SourceFile
         let mut total_extra_bytes = 0;
@@ -856,7 +852,7 @@ impl SourceMap {
     // Return the index of the source_file (in self.files) which contains pos.
     pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize {
         let files = self.files.borrow();
-        let files = &files.file_maps;
+        let files = &files.source_files;
         let count = files.len();
 
         // Binary search for the source_file.
@@ -942,7 +938,8 @@ impl SourceMap {
                 } else {
                     format!("{}<", &snippet[..offset])
                 };
-                new_snippet.push_str(&self.span_to_snippet(span).unwrap_or("T".to_string()));
+                new_snippet.push_str(
+                    &self.span_to_snippet(span).unwrap_or_else(|_| "T".to_string()));
                 new_snippet.push('>');
 
                 return Some((sugg_span, new_snippet));
@@ -978,9 +975,9 @@ impl SourceMapper for SourceMap {
         }
         sp
     }
-    fn ensure_source_file_source_present(&self, file_map: Lrc<SourceFile>) -> bool {
-        file_map.add_external_src(
-            || match file_map.name {
+    fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool {
+        source_file.add_external_src(
+            || match source_file.name {
                 FileName::Real(ref name) => self.file_loader.read_file(name).ok(),
                 _ => None,
             }
@@ -1035,97 +1032,97 @@ mod tests {
     use super::*;
     use rustc_data_structures::sync::Lrc;
 
-    fn init_code_map() -> SourceMap {
-        let cm = SourceMap::new(FilePathMapping::empty());
-        cm.new_source_file(PathBuf::from("blork.rs").into(),
+    fn init_source_map() -> SourceMap {
+        let sm = SourceMap::new(FilePathMapping::empty());
+        sm.new_source_file(PathBuf::from("blork.rs").into(),
                        "first line.\nsecond line".to_string());
-        cm.new_source_file(PathBuf::from("empty.rs").into(),
+        sm.new_source_file(PathBuf::from("empty.rs").into(),
                        String::new());
-        cm.new_source_file(PathBuf::from("blork2.rs").into(),
+        sm.new_source_file(PathBuf::from("blork2.rs").into(),
                        "first line.\nsecond line".to_string());
-        cm
+        sm
     }
 
     #[test]
     fn t3() {
         // Test lookup_byte_offset
-        let cm = init_code_map();
+        let sm = init_source_map();
 
-        let fmabp1 = cm.lookup_byte_offset(BytePos(23));
-        assert_eq!(fmabp1.fm.name, PathBuf::from("blork.rs").into());
-        assert_eq!(fmabp1.pos, BytePos(23));
+        let srcfbp1 = sm.lookup_byte_offset(BytePos(23));
+        assert_eq!(srcfbp1.sf.name, PathBuf::from("blork.rs").into());
+        assert_eq!(srcfbp1.pos, BytePos(23));
 
-        let fmabp1 = cm.lookup_byte_offset(BytePos(24));
-        assert_eq!(fmabp1.fm.name, PathBuf::from("empty.rs").into());
-        assert_eq!(fmabp1.pos, BytePos(0));
+        let srcfbp1 = sm.lookup_byte_offset(BytePos(24));
+        assert_eq!(srcfbp1.sf.name, PathBuf::from("empty.rs").into());
+        assert_eq!(srcfbp1.pos, BytePos(0));
 
-        let fmabp2 = cm.lookup_byte_offset(BytePos(25));
-        assert_eq!(fmabp2.fm.name, PathBuf::from("blork2.rs").into());
-        assert_eq!(fmabp2.pos, BytePos(0));
+        let srcfbp2 = sm.lookup_byte_offset(BytePos(25));
+        assert_eq!(srcfbp2.sf.name, PathBuf::from("blork2.rs").into());
+        assert_eq!(srcfbp2.pos, BytePos(0));
     }
 
     #[test]
     fn t4() {
         // Test bytepos_to_file_charpos
-        let cm = init_code_map();
+        let sm = init_source_map();
 
-        let cp1 = cm.bytepos_to_file_charpos(BytePos(22));
+        let cp1 = sm.bytepos_to_file_charpos(BytePos(22));
         assert_eq!(cp1, CharPos(22));
 
-        let cp2 = cm.bytepos_to_file_charpos(BytePos(25));
+        let cp2 = sm.bytepos_to_file_charpos(BytePos(25));
         assert_eq!(cp2, CharPos(0));
     }
 
     #[test]
     fn t5() {
         // Test zero-length source_files.
-        let cm = init_code_map();
+        let sm = init_source_map();
 
-        let loc1 = cm.lookup_char_pos(BytePos(22));
+        let loc1 = sm.lookup_char_pos(BytePos(22));
         assert_eq!(loc1.file.name, PathBuf::from("blork.rs").into());
         assert_eq!(loc1.line, 2);
         assert_eq!(loc1.col, CharPos(10));
 
-        let loc2 = cm.lookup_char_pos(BytePos(25));
+        let loc2 = sm.lookup_char_pos(BytePos(25));
         assert_eq!(loc2.file.name, PathBuf::from("blork2.rs").into());
         assert_eq!(loc2.line, 1);
         assert_eq!(loc2.col, CharPos(0));
     }
 
-    fn init_code_map_mbc() -> SourceMap {
-        let cm = SourceMap::new(FilePathMapping::empty());
+    fn init_source_map_mbc() -> SourceMap {
+        let sm = SourceMap::new(FilePathMapping::empty());
         // € is a three byte utf8 char.
-        cm.new_source_file(PathBuf::from("blork.rs").into(),
+        sm.new_source_file(PathBuf::from("blork.rs").into(),
                        "fir€st €€€€ line.\nsecond line".to_string());
-        cm.new_source_file(PathBuf::from("blork2.rs").into(),
+        sm.new_source_file(PathBuf::from("blork2.rs").into(),
                        "first line€€.\n€ second line".to_string());
-        cm
+        sm
     }
 
     #[test]
     fn t6() {
         // Test bytepos_to_file_charpos in the presence of multi-byte chars
-        let cm = init_code_map_mbc();
+        let sm = init_source_map_mbc();
 
-        let cp1 = cm.bytepos_to_file_charpos(BytePos(3));
+        let cp1 = sm.bytepos_to_file_charpos(BytePos(3));
         assert_eq!(cp1, CharPos(3));
 
-        let cp2 = cm.bytepos_to_file_charpos(BytePos(6));
+        let cp2 = sm.bytepos_to_file_charpos(BytePos(6));
         assert_eq!(cp2, CharPos(4));
 
-        let cp3 = cm.bytepos_to_file_charpos(BytePos(56));
+        let cp3 = sm.bytepos_to_file_charpos(BytePos(56));
         assert_eq!(cp3, CharPos(12));
 
-        let cp4 = cm.bytepos_to_file_charpos(BytePos(61));
+        let cp4 = sm.bytepos_to_file_charpos(BytePos(61));
         assert_eq!(cp4, CharPos(15));
     }
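
As a worked check of t6: in "fir€st ...", the three-byte '€' occupies byte offsets 3..6, so byte 6 maps to character index 4 (f, i, r, €); each multibyte character contributes len_utf8() - 1 extra bytes that the mapping subtracts. A standalone version of the same computation:

    // Count how many chars start before the given byte offset.
    fn byte_to_char(s: &str, byte: usize) -> usize {
        s.char_indices().take_while(|&(i, _)| i < byte).count()
    }

    fn main() {
        let s = "fir€st €€€€ line.";
        assert_eq!(byte_to_char(s, 3), 3); // before '€'
        assert_eq!(byte_to_char(s, 6), 4); // after '€': 3 chars + 1
    }
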
 
     #[test]
     fn t7() {
         // Test span_to_lines for a span ending at the end of source_file
-        let cm = init_code_map();
+        let sm = init_source_map();
         let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
-        let file_lines = cm.span_to_lines(span).unwrap();
+        let file_lines = sm.span_to_lines(span).unwrap();
 
         assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into());
         assert_eq!(file_lines.lines.len(), 1);
@@ -1147,17 +1144,17 @@ mod tests {
     /// lines in the middle of a file.
     #[test]
     fn span_to_snippet_and_lines_spanning_multiple_lines() {
-        let cm = SourceMap::new(FilePathMapping::empty());
+        let sm = SourceMap::new(FilePathMapping::empty());
         let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
         let selection = "     \n    ~~\n~~~\n~~~~~     \n   \n";
-        cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
+        sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
         let span = span_from_selection(inputtext, selection);
 
         // check that we are extracting the text we thought we were extracting
-        assert_eq!(&cm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD");
+        assert_eq!(&sm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD");
 
         // check that span_to_lines gives us the complete result with the lines/cols we expected
-        let lines = cm.span_to_lines(span).unwrap();
+        let lines = sm.span_to_lines(span).unwrap();
         let expected = vec![
             LineInfo { line_index: 1, start_col: CharPos(4), end_col: CharPos(6) },
             LineInfo { line_index: 2, start_col: CharPos(0), end_col: CharPos(3) },
@@ -1169,9 +1166,9 @@ mod tests {
     #[test]
     fn t8() {
         // Test span_to_snippet for a span ending at the end of source_file
-        let cm = init_code_map();
+        let sm = init_source_map();
         let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
-        let snippet = cm.span_to_snippet(span);
+        let snippet = sm.span_to_snippet(span);
 
         assert_eq!(snippet, Ok("second line".to_string()));
     }
@@ -1179,9 +1176,9 @@ mod tests {
     #[test]
     fn t9() {
         // Test span_to_str for a span ending at the end of source_file
-        let cm = init_code_map();
+        let sm = init_source_map();
         let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
-        let sstr =  cm.span_to_string(span);
+        let sstr = sm.span_to_string(span);
 
         assert_eq!(sstr, "blork.rs:2:1: 2:12");
     }
@@ -1189,15 +1186,15 @@ mod tests {
     /// Test failing to merge two spans on different lines
     #[test]
     fn span_merging_fail() {
-        let cm = SourceMap::new(FilePathMapping::empty());
+        let sm = SourceMap::new(FilePathMapping::empty());
         let inputtext  = "bbbb BB\ncc CCC\n";
         let selection1 = "     ~~\n      \n";
         let selection2 = "       \n   ~~~\n";
-        cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
+        sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
         let span1 = span_from_selection(inputtext, selection1);
         let span2 = span_from_selection(inputtext, selection2);
 
-        assert!(cm.merge_spans(span1, span2).is_none());
+        assert!(sm.merge_spans(span1, span2).is_none());
     }
 
     /// Returns the span corresponding to the `n`th occurrence of
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 9f554a90afb..8ff4b0d025c 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -346,7 +346,7 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
 
     test_runner.span = sp;
 
-    let test_main_path_expr = ecx.expr_path(test_runner.clone());
+    let test_main_path_expr = ecx.expr_path(test_runner);
     let call_test_main = ecx.expr_call(sp, test_main_path_expr,
                                        vec![mk_tests_slice(cx)]);
     let call_test_main = ecx.stmt_expr(call_test_main);
diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs
index d49965fd936..799489ba42c 100644
--- a/src/libsyntax/test_snippet.rs
+++ b/src/libsyntax/test_snippet.rs
@@ -50,8 +50,8 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
     with_globals(|| {
         let output = Arc::new(Mutex::new(Vec::new()));
 
-        let code_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        code_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
+        let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        source_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
 
         let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
         let mut msp = MultiSpan::from_span(primary_span);
@@ -59,11 +59,11 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
             let span = make_span(&file_text, &span_label.start, &span_label.end);
             msp.push_span_label(span, span_label.label.to_string());
             println!("span: {:?} label: {:?}", span, span_label.label);
-            println!("text: {:?}", code_map.span_to_snippet(span));
+            println!("text: {:?}", source_map.span_to_snippet(span));
         }
 
         let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }),
-                                        Some(code_map.clone()),
+                                        Some(source_map.clone()),
                                         false,
                                         false);
         let handler = Handler::with_emitter(true, false, Box::new(emitter));
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 70867f9e42f..29bd63d28c5 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -606,7 +606,7 @@ impl Cursor {
             CursorKind::JointTree(ref tree, _) => tree.clone().joint(),
             CursorKind::Stream(ref cursor) => TokenStream::concat_rc_vec({
                 cursor.stack.get(0).cloned().map(|(stream, _)| stream)
-                    .unwrap_or(cursor.stream.clone())
+                    .unwrap_or_else(|| cursor.stream.clone())
             }),
         }
     }
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index 374154e6333..98e9272e6d8 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -68,7 +68,7 @@ pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> {
 pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
     let ps = ParseSess::new(FilePathMapping::empty());
     with_error_checking_parse(source_str, &ps, |p| {
-        p.parse_pat()
+        p.parse_pat(None)
     })
 }