author     Taiki Endo <te316e89@gmail.com>   2019-02-07 02:33:01 +0900
committer  Taiki Endo <te316e89@gmail.com>   2019-02-07 02:33:01 +0900
commit     7bb082d27fe472f52b103de0ae9fc6fa7e6546cc (patch)
tree       dfed08e00fc6e88022fd7249bd5017e5d57110a7 /src/libsyntax
parent     2596bc1368d1e3d34c9a7841ad87a3100f01cbad (diff)
download   rust-7bb082d27fe472f52b103de0ae9fc6fa7e6546cc.tar.gz
           rust-7bb082d27fe472f52b103de0ae9fc6fa7e6546cc.zip
libsyntax => 2018
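This commit switches src/libsyntax to the 2018 edition. The mechanical changes visible below are: `edition = "2018"` in Cargo.toml, an explicit `crate::` prefix on in-crate imports, the `extern crate`/`#[macro_use]` list in lib.rs replaced by ordinary `use` imports (e.g. `use log::debug;`), and `fmt::Formatter<'_>` in place of bare `fmt::Formatter` to satisfy the newly added `#![deny(rust_2018_idioms)]`. A minimal, self-contained sketch of the path and lifetime changes; the `ast`, `Ident`, and `Label` names only mirror shapes from the diff and are not code from this commit:

    // main.rs of a crate whose Cargo.toml declares `edition = "2018"`.
    mod ast {
        #[derive(Debug)]
        pub struct Ident(pub String);
    }

    // 2015 wrote `use ast::Ident;`; 2018 requires the `crate::` prefix
    // (or `self::`/`super::`) for `use` paths inside the current crate.
    use crate::ast::Ident;
    use std::fmt;

    struct Label { ident: Ident }

    impl fmt::Debug for Label {
        // The `rust_2018_idioms` lint group also asks for elided lifetimes
        // to be spelled out, hence `Formatter<'_>` rather than `Formatter`.
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "label({:?})", self.ident)
        }
    }

    fn main() {
        println!("{:?}", Label { ident: Ident("x".into()) });
    }

The same rewrite, applied module by module, accounts for most of the 617 insertions and 574 deletions in the diffstat below.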
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/Cargo.toml                   |   1
-rw-r--r--  src/libsyntax/ast.rs                       |  62
-rw-r--r--  src/libsyntax/attr/builtin.rs              |  17
-rw-r--r--  src/libsyntax/attr/mod.rs                  |  42
-rw-r--r--  src/libsyntax/config.rs                    |  21
-rw-r--r--  src/libsyntax/diagnostics/metadata.rs      |   9
-rw-r--r--  src/libsyntax/diagnostics/plugin.rs        |  29
-rw-r--r--  src/libsyntax/early_buffered_lints.rs      |   2
-rw-r--r--  src/libsyntax/entry.rs                     |   4
-rw-r--r--  src/libsyntax/ext/base.rs                  |  85
-rw-r--r--  src/libsyntax/ext/build.rs                 |  19
-rw-r--r--  src/libsyntax/ext/derive.rs                |  19
-rw-r--r--  src/libsyntax/ext/expand.rs                |  49
-rw-r--r--  src/libsyntax/ext/placeholders.rs          |  23
-rw-r--r--  src/libsyntax/ext/source_util.rs           |  41
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs       |  31
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs        |  58
-rw-r--r--  src/libsyntax/ext/tt/quoted.rs             |  19
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs         |  23
-rw-r--r--  src/libsyntax/feature_gate.rs              |  28
-rw-r--r--  src/libsyntax/json.rs                      |  16
-rw-r--r--  src/libsyntax/lib.rs                       |  29
-rw-r--r--  src/libsyntax/mut_visit.rs                 |  33
-rw-r--r--  src/libsyntax/parse/attr.rs                |  18
-rw-r--r--  src/libsyntax/parse/classify.rs            |   2
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs      |  29
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs           |  35
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs    |   8
-rw-r--r--  src/libsyntax/parse/lexer/unicode_chars.rs |   2
-rw-r--r--  src/libsyntax/parse/mod.rs                 |  62
-rw-r--r--  src/libsyntax/parse/parser.rs              | 112
-rw-r--r--  src/libsyntax/parse/token.rs               |  36
-rw-r--r--  src/libsyntax/print/pp.rs                  |   3
-rw-r--r--  src/libsyntax/print/pprust.rs              |  45
-rw-r--r--  src/libsyntax/ptr.rs                       |   6
-rw-r--r--  src/libsyntax/show_span.rs                 |   8
-rw-r--r--  src/libsyntax/source_map.rs                |   8
-rw-r--r--  src/libsyntax/std_inject.rs                |  17
-rw-r--r--  src/libsyntax/test.rs                      |  52
-rw-r--r--  src/libsyntax/test_snippet.rs              |   9
-rw-r--r--  src/libsyntax/tokenstream.rs               |  27
-rw-r--r--  src/libsyntax/util/lev_distance.rs         |   4
-rw-r--r--  src/libsyntax/util/node_count.rs           |   6
-rw-r--r--  src/libsyntax/util/parser.rs               |  20
-rw-r--r--  src/libsyntax/util/parser_testing.rs       |  15
-rw-r--r--  src/libsyntax/visit.rs                     |   7
46 files changed, 617 insertions, 574 deletions
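The lib.rs hunk further down also drops the 2015-style `extern crate` block (including `#[macro_use] extern crate log;` and `#[macro_use] extern crate smallvec;`); under the 2018 edition the dependencies listed in Cargo.toml are available directly, and each module imports the macros it needs (`use log::debug;`, `use smallvec::{smallvec, SmallVec};`). A minimal sketch of that pattern, assuming only a binary crate with `log` declared as a dependency:

    // Rust 2015 needed a crate-root declaration to pull in macros:
    //     #[macro_use] extern crate log;
    // Rust 2018 imports the macro like any other item, where it is used.
    use log::debug;

    fn main() {
        // Without a logger backend installed this prints nothing, but it
        // compiles and runs, which is all the sketch is meant to show.
        debug!("libsyntax-style macro import on the 2018 edition");
    }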
diff --git a/src/libsyntax/Cargo.toml b/src/libsyntax/Cargo.toml
index fba2623e005..f1e60ba78b7 100644
--- a/src/libsyntax/Cargo.toml
+++ b/src/libsyntax/Cargo.toml
@@ -2,6 +2,7 @@
 authors = ["The Rust Project Developers"]
 name = "syntax"
 version = "0.0.0"
+edition = "2018"
 
 [lib]
 name = "syntax"
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 4f3f5631cc3..2cfe2cc896c 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -1,22 +1,23 @@
 // The Rust abstract syntax tree.
 
-pub use self::GenericArgs::*;
-pub use self::UnsafeSource::*;
-pub use symbol::{Ident, Symbol as Name};
-pub use util::parser::ExprPrecedence;
-
-use ext::hygiene::{Mark, SyntaxContext};
-use print::pprust;
-use ptr::P;
+pub use GenericArgs::*;
+pub use UnsafeSource::*;
+pub use crate::symbol::{Ident, Symbol as Name};
+pub use crate::util::parser::ExprPrecedence;
+
+use crate::ext::hygiene::{Mark, SyntaxContext};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::source_map::{dummy_spanned, respan, Spanned};
+use crate::symbol::{keywords, Symbol};
+use crate::tokenstream::TokenStream;
+use crate::ThinVec;
+
 use rustc_data_structures::indexed_vec::Idx;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert;
 use rustc_target::spec::abi::Abi;
-use source_map::{dummy_spanned, respan, Spanned};
-use symbol::{keywords, Symbol};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::TokenStream;
-use ThinVec;
 
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::sync::Lrc;
@@ -31,7 +32,7 @@ pub struct Label {
 }
 
 impl fmt::Debug for Label {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "label({:?})", self.ident)
     }
 }
@@ -43,7 +44,7 @@ pub struct Lifetime {
 }
 
 impl fmt::Debug for Lifetime {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
             "lifetime({}: {})",
@@ -74,13 +75,13 @@ impl<'a> PartialEq<&'a str> for Path {
 }
 
 impl fmt::Debug for Path {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "path({})", pprust::path_to_string(self))
     }
 }
 
 impl fmt::Display for Path {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", pprust::path_to_string(self))
     }
 }
@@ -219,6 +220,7 @@ impl ParenthesizedArgs {
 // hack to ensure that we don't try to access the private parts of `NodeId` in this module
 mod node_id_inner {
     use rustc_data_structures::indexed_vec::Idx;
+    use rustc_data_structures::newtype_index;
     newtype_index! {
         pub struct NodeId {
             ENCODABLE = custom
@@ -227,7 +229,7 @@ mod node_id_inner {
     }
 }
 
-pub use self::node_id_inner::NodeId;
+pub use node_id_inner::NodeId;
 
 impl NodeId {
     pub fn placeholder_from_mark(mark: Mark) -> Self {
@@ -240,7 +242,7 @@ impl NodeId {
 }
 
 impl fmt::Display for NodeId {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(&self.as_u32(), f)
     }
 }
@@ -478,7 +480,7 @@ pub struct Pat {
 }
 
 impl fmt::Debug for Pat {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "pat({}: {})", self.id, pprust::pat_to_string(self))
     }
 }
@@ -676,7 +678,7 @@ pub enum BinOpKind {
 
 impl BinOpKind {
     pub fn to_string(&self) -> &'static str {
-        use self::BinOpKind::*;
+        use BinOpKind::*;
         match *self {
             Add => "+",
             Sub => "-",
@@ -713,7 +715,7 @@ impl BinOpKind {
     }
 
     pub fn is_comparison(&self) -> bool {
-        use self::BinOpKind::*;
+        use BinOpKind::*;
         match *self {
             Eq | Lt | Le | Ne | Gt | Ge => true,
             And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false,
@@ -792,7 +794,7 @@ impl Stmt {
 }
 
 impl fmt::Debug for Stmt {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
             "stmt({}: {})",
@@ -1030,7 +1032,7 @@ impl Expr {
 }
 
 impl fmt::Debug for Expr {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "expr({}: {})", self.id, pprust::expr_to_string(self))
     }
 }
@@ -1438,13 +1440,13 @@ pub enum IntTy {
 }
 
 impl fmt::Debug for IntTy {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(self, f)
     }
 }
 
 impl fmt::Display for IntTy {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.ty_to_string())
     }
 }
@@ -1519,13 +1521,13 @@ impl UintTy {
 }
 
 impl fmt::Debug for UintTy {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(self, f)
     }
 }
 
 impl fmt::Display for UintTy {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.ty_to_string())
     }
 }
@@ -1547,7 +1549,7 @@ pub struct Ty {
 }
 
 impl fmt::Debug for Ty {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "type({})", pprust::ty_to_string(self))
     }
 }
@@ -1832,7 +1834,7 @@ pub enum Defaultness {
 }
 
 impl fmt::Display for Unsafety {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(
             match *self {
                 Unsafety::Normal => "normal",
@@ -1852,7 +1854,7 @@ pub enum ImplPolarity {
 }
 
 impl fmt::Debug for ImplPolarity {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             ImplPolarity::Positive => "positive".fmt(f),
             ImplPolarity::Negative => "negative".fmt(f),
diff --git a/src/libsyntax/attr/builtin.rs b/src/libsyntax/attr/builtin.rs
index 7fe6f4a2316..6f7761b54fc 100644
--- a/src/libsyntax/attr/builtin.rs
+++ b/src/libsyntax/attr/builtin.rs
@@ -1,9 +1,10 @@
 //! Parsing and validation of builtin attributes
 
-use ast::{self, Attribute, MetaItem, Name, NestedMetaItemKind};
-use errors::{Applicability, Handler};
-use feature_gate::{Features, GatedCfg};
-use parse::ParseSess;
+use crate::ast::{self, Attribute, MetaItem, Name, NestedMetaItemKind};
+use crate::errors::{Applicability, Handler};
+use crate::feature_gate::{Features, GatedCfg};
+use crate::parse::ParseSess;
+
 use syntax_pos::{symbol::Symbol, Span};
 
 use super::{list_contains_name, mark_used, MetaItemKind};
@@ -188,7 +189,7 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
                                  -> Option<Stability>
     where I: Iterator<Item = &'a Attribute>
 {
-    use self::StabilityLevel::*;
+    use StabilityLevel::*;
 
     let mut stab: Option<Stability> = None;
     let mut rustc_depr: Option<RustcDeprecation> = None;
@@ -694,7 +695,7 @@ pub enum IntType {
 impl IntType {
     #[inline]
     pub fn is_signed(self) -> bool {
-        use self::IntType::*;
+        use IntType::*;
 
         match self {
             SignedInt(..) => true,
@@ -711,7 +712,7 @@ impl IntType {
 /// structure layout, `packed` to remove padding, and `transparent` to delegate representation
 /// concerns to the only non-ZST field.
 pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
-    use self::ReprAttr::*;
+    use ReprAttr::*;
 
     let mut acc = Vec::new();
     let diagnostic = &sess.span_diagnostic;
@@ -831,7 +832,7 @@ pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec<ReprAttr> {
 }
 
 fn int_type_of_word(s: &str) -> Option<IntType> {
-    use self::IntType::*;
+    use IntType::*;
 
     match s {
         "i8" => Some(SignedInt(ast::IntTy::I8)),
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index c5a397e0480..0c3aedae715 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -2,31 +2,33 @@
 
 mod builtin;
 
-pub use self::builtin::{
+pub use builtin::{
     cfg_matches, contains_feature_attr, eval_condition, find_crate_name, find_deprecation,
     find_repr_attrs, find_stability, find_unwind_attr, Deprecation, InlineAttr, OptimizeAttr,
     IntType, ReprAttr, RustcDeprecation, Stability, StabilityLevel, UnwindAttr,
 };
-pub use self::IntType::*;
-pub use self::ReprAttr::*;
-pub use self::StabilityLevel::*;
-
-use ast;
-use ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment};
-use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
-use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam};
-use mut_visit::visit_clobber;
-use source_map::{BytePos, Spanned, respan, dummy_spanned};
+pub use IntType::*;
+pub use ReprAttr::*;
+pub use StabilityLevel::*;
+
+use crate::ast;
+use crate::ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment};
+use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
+use crate::ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam};
+use crate::mut_visit::visit_clobber;
+use crate::source_map::{BytePos, Spanned, respan, dummy_spanned};
+use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use crate::parse::parser::Parser;
+use crate::parse::{self, ParseSess, PResult};
+use crate::parse::token::{self, Token};
+use crate::ptr::P;
+use crate::symbol::Symbol;
+use crate::ThinVec;
+use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};
+use crate::GLOBALS;
+
+use log::debug;
 use syntax_pos::{FileName, Span};
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use parse::parser::Parser;
-use parse::{self, ParseSess, PResult};
-use parse::token::{self, Token};
-use ptr::P;
-use symbol::Symbol;
-use ThinVec;
-use tokenstream::{TokenStream, TokenTree, DelimSpan};
-use GLOBALS;
 
 use std::iter;
 use std::ops::DerefMut;
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs
index fce2601e3aa..bfc4457f054 100644
--- a/src/libsyntax/config.rs
+++ b/src/libsyntax/config.rs
@@ -1,20 +1,21 @@
-use attr::HasAttrs;
-use feature_gate::{
+use crate::attr::HasAttrs;
+use crate::feature_gate::{
     feature_err,
     EXPLAIN_STMT_ATTR_SYNTAX,
     Features,
     get_features,
     GateIssue,
 };
-use attr;
-use ast;
-use edition::Edition;
-use errors::Applicability;
-use mut_visit::*;
-use parse::{token, ParseSess};
-use ptr::P;
+use crate::attr;
+use crate::ast;
+use crate::edition::Edition;
+use crate::errors::Applicability;
+use crate::mut_visit::*;
+use crate::parse::{token, ParseSess};
+use crate::ptr::P;
+use crate::util::map_in_place::MapInPlace;
+
 use smallvec::SmallVec;
-use util::map_in_place::MapInPlace;
 
 /// A folder that strips out items that do not belong in the current configuration.
 pub struct StripUnconfigured<'a> {
diff --git a/src/libsyntax/diagnostics/metadata.rs b/src/libsyntax/diagnostics/metadata.rs
index abde3dca0f6..3abb820a678 100644
--- a/src/libsyntax/diagnostics/metadata.rs
+++ b/src/libsyntax/diagnostics/metadata.rs
@@ -12,8 +12,9 @@ use std::error::Error;
 use rustc_serialize::json::as_json;
 
 use syntax_pos::{Span, FileName};
-use ext::base::ExtCtxt;
-use diagnostics::plugin::{ErrorMap, ErrorInfo};
+
+use crate::ext::base::ExtCtxt;
+use crate::diagnostics::plugin::{ErrorMap, ErrorInfo};
 
 /// JSON encodable/decodable version of `ErrorInfo`.
 #[derive(PartialEq, RustcDecodable, RustcEncodable)]
@@ -34,7 +35,7 @@ pub struct ErrorLocation {
 
 impl ErrorLocation {
     /// Create an error location from a span.
-    pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation {
+    pub fn from_span(ecx: &ExtCtxt<'_>, sp: Span) -> ErrorLocation {
         let loc = ecx.source_map().lookup_char_pos_adj(sp.lo());
         ErrorLocation {
             filename: loc.filename,
@@ -62,7 +63,7 @@ fn get_metadata_path(directory: PathBuf, name: &str) -> PathBuf {
 ///
 /// For our current purposes the prefix is the target architecture and the name is a crate name.
 /// If an error occurs steps will be taken to ensure that no file is created.
-pub fn output_metadata(ecx: &ExtCtxt, prefix: &str, name: &str, err_map: &ErrorMap)
+pub fn output_metadata(ecx: &ExtCtxt<'_>, prefix: &str, name: &str, err_map: &ErrorMap)
     -> Result<(), Box<dyn Error>>
 {
     // Create the directory to place the file in.
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index fa6b825f2a2..e79378d93bd 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -1,20 +1,21 @@
 use std::collections::BTreeMap;
 use std::env;
 
-use ast;
-use ast::{Ident, Name};
-use source_map;
+use crate::ast::{self, Ident, Name};
+use crate::source_map;
+use crate::ext::base::{ExtCtxt, MacEager, MacResult};
+use crate::ext::build::AstBuilder;
+use crate::parse::token;
+use crate::ptr::P;
+use crate::symbol::{keywords, Symbol};
+use crate::tokenstream::{TokenTree};
+
+use smallvec::smallvec;
 use syntax_pos::Span;
-use ext::base::{ExtCtxt, MacEager, MacResult};
-use ext::build::AstBuilder;
-use parse::token;
-use ptr::P;
-use symbol::{keywords, Symbol};
-use tokenstream::{TokenTree};
 
-use diagnostics::metadata::output_metadata;
+use crate::diagnostics::metadata::output_metadata;
 
-pub use errors::*;
+pub use crate::errors::*;
 
 // Maximum width of any line in an extended error description (inclusive).
 const MAX_DESCRIPTION_WIDTH: usize = 80;
@@ -28,7 +29,7 @@ pub struct ErrorInfo {
 /// Mapping from error codes to metadata.
 pub type ErrorMap = BTreeMap<Name, ErrorInfo>;
 
-pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
+pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                    span: Span,
                                    token_tree: &[TokenTree])
                                    -> Box<dyn MacResult+'cx> {
@@ -61,7 +62,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
     MacEager::expr(ecx.expr_tuple(span, Vec::new()))
 }
 
-pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
+pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                        span: Span,
                                        token_tree: &[TokenTree])
                                        -> Box<dyn MacResult+'cx> {
@@ -134,7 +135,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
 }
 
 #[allow(deprecated)]
-pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
+pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                           span: Span,
                                           token_tree: &[TokenTree])
                                           -> Box<dyn MacResult+'cx> {
diff --git a/src/libsyntax/early_buffered_lints.rs b/src/libsyntax/early_buffered_lints.rs
index cf9671a14b3..977e6d45877 100644
--- a/src/libsyntax/early_buffered_lints.rs
+++ b/src/libsyntax/early_buffered_lints.rs
@@ -3,7 +3,7 @@
 //! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat
 //! redundant. Later, these types can be converted to types for use by the rest of the compiler.
 
-use syntax::ast::NodeId;
+use crate::syntax::ast::NodeId;
 use syntax_pos::MultiSpan;
 
 /// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be
diff --git a/src/libsyntax/entry.rs b/src/libsyntax/entry.rs
index 72a550a05d5..09e26e29d86 100644
--- a/src/libsyntax/entry.rs
+++ b/src/libsyntax/entry.rs
@@ -1,5 +1,5 @@
-use attr;
-use ast::{Item, ItemKind};
+use crate::attr;
+use crate::ast::{Item, ItemKind};
 
 pub enum EntryPointType {
     None,
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index b53068f5bc2..465b53184dc 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -1,28 +1,29 @@
-pub use self::SyntaxExtension::*;
-
-use ast::{self, Attribute, Name, PatKind, MetaItem};
-use attr::HasAttrs;
-use source_map::{SourceMap, Spanned, respan};
+pub use SyntaxExtension::*;
+
+use crate::ast::{self, Attribute, Name, PatKind, MetaItem};
+use crate::attr::HasAttrs;
+use crate::source_map::{SourceMap, Spanned, respan};
+use crate::edition::Edition;
+use crate::errors::{DiagnosticBuilder, DiagnosticId};
+use crate::ext::expand::{self, AstFragment, Invocation};
+use crate::ext::hygiene::{self, Mark, SyntaxContext, Transparency};
+use crate::mut_visit::{self, MutVisitor};
+use crate::parse::{self, parser, DirectoryOwnership};
+use crate::parse::token;
+use crate::ptr::P;
+use crate::symbol::{keywords, Ident, Symbol};
+use crate::ThinVec;
+use crate::tokenstream::{self, TokenStream};
+
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::{Span, MultiSpan, DUMMY_SP};
-use edition::Edition;
-use errors::{DiagnosticBuilder, DiagnosticId};
-use ext::expand::{self, AstFragment, Invocation};
-use ext::hygiene::{self, Mark, SyntaxContext, Transparency};
-use mut_visit::{self, MutVisitor};
-use parse::{self, parser, DirectoryOwnership};
-use parse::token;
-use ptr::P;
-use smallvec::SmallVec;
-use symbol::{keywords, Ident, Symbol};
-use ThinVec;
 
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::{self, Lrc};
 use std::iter;
 use std::path::PathBuf;
 use std::rc::Rc;
-use rustc_data_structures::sync::{self, Lrc};
 use std::default::Default;
-use tokenstream::{self, TokenStream};
 
 
 #[derive(Debug,Clone)]
@@ -139,7 +140,7 @@ impl Annotatable {
 // A more flexible ItemDecorator.
 pub trait MultiItemDecorator {
     fn expand(&self,
-              ecx: &mut ExtCtxt,
+              ecx: &mut ExtCtxt<'_>,
               sp: Span,
               meta_item: &ast::MetaItem,
               item: &Annotatable,
@@ -147,10 +148,10 @@ pub trait MultiItemDecorator {
 }
 
 impl<F> MultiItemDecorator for F
-    where F : Fn(&mut ExtCtxt, Span, &ast::MetaItem, &Annotatable, &mut dyn FnMut(Annotatable))
+    where F : Fn(&mut ExtCtxt<'_>, Span, &ast::MetaItem, &Annotatable, &mut dyn FnMut(Annotatable))
 {
     fn expand(&self,
-              ecx: &mut ExtCtxt,
+              ecx: &mut ExtCtxt<'_>,
               sp: Span,
               meta_item: &ast::MetaItem,
               item: &Annotatable,
@@ -163,7 +164,7 @@ impl<F> MultiItemDecorator for F
 // FIXME Decorators should follow the same pattern too.
 pub trait MultiItemModifier {
     fn expand(&self,
-              ecx: &mut ExtCtxt,
+              ecx: &mut ExtCtxt<'_>,
               span: Span,
               meta_item: &ast::MetaItem,
               item: Annotatable)
@@ -171,11 +172,11 @@ pub trait MultiItemModifier {
 }
 
 impl<F, T> MultiItemModifier for F
-    where F: Fn(&mut ExtCtxt, Span, &ast::MetaItem, Annotatable) -> T,
+    where F: Fn(&mut ExtCtxt<'_>, Span, &ast::MetaItem, Annotatable) -> T,
           T: Into<Vec<Annotatable>>,
 {
     fn expand(&self,
-              ecx: &mut ExtCtxt,
+              ecx: &mut ExtCtxt<'_>,
               span: Span,
               meta_item: &ast::MetaItem,
               item: Annotatable)
@@ -192,7 +193,7 @@ impl Into<Vec<Annotatable>> for Annotatable {
 
 pub trait ProcMacro {
     fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
+                   ecx: &'cx mut ExtCtxt<'_>,
                    span: Span,
                    ts: TokenStream)
                    -> TokenStream;
@@ -202,7 +203,7 @@ impl<F> ProcMacro for F
     where F: Fn(TokenStream) -> TokenStream
 {
     fn expand<'cx>(&self,
-                   _ecx: &'cx mut ExtCtxt,
+                   _ecx: &'cx mut ExtCtxt<'_>,
                    _span: Span,
                    ts: TokenStream)
                    -> TokenStream {
@@ -213,7 +214,7 @@ impl<F> ProcMacro for F
 
 pub trait AttrProcMacro {
     fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
+                   ecx: &'cx mut ExtCtxt<'_>,
                    span: Span,
                    annotation: TokenStream,
                    annotated: TokenStream)
@@ -224,7 +225,7 @@ impl<F> AttrProcMacro for F
     where F: Fn(TokenStream, TokenStream) -> TokenStream
 {
     fn expand<'cx>(&self,
-                   _ecx: &'cx mut ExtCtxt,
+                   _ecx: &'cx mut ExtCtxt<'_>,
                    _span: Span,
                    annotation: TokenStream,
                    annotated: TokenStream)
@@ -238,7 +239,7 @@ impl<F> AttrProcMacro for F
 pub trait TTMacroExpander {
     fn expand<'cx>(
         &self,
-        ecx: &'cx mut ExtCtxt,
+        ecx: &'cx mut ExtCtxt<'_>,
         span: Span,
         input: TokenStream,
         def_span: Option<Span>,
@@ -246,16 +247,16 @@ pub trait TTMacroExpander {
 }
 
 pub type MacroExpanderFn =
-    for<'cx> fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
+    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
                 -> Box<dyn MacResult+'cx>;
 
 impl<F> TTMacroExpander for F
-    where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
+    where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
     -> Box<dyn MacResult+'cx>
 {
     fn expand<'cx>(
         &self,
-        ecx: &'cx mut ExtCtxt,
+        ecx: &'cx mut ExtCtxt<'_>,
         span: Span,
         input: TokenStream,
         _def_span: Option<Span>,
@@ -286,7 +287,7 @@ impl<F> TTMacroExpander for F
 
 pub trait IdentMacroExpander {
     fn expand<'cx>(&self,
-                   cx: &'cx mut ExtCtxt,
+                   cx: &'cx mut ExtCtxt<'_>,
                    sp: Span,
                    ident: ast::Ident,
                    token_tree: Vec<tokenstream::TokenTree>)
@@ -294,15 +295,15 @@ pub trait IdentMacroExpander {
 }
 
 pub type IdentMacroExpanderFn =
-    for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<tokenstream::TokenTree>)
+    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, ast::Ident, Vec<tokenstream::TokenTree>)
                 -> Box<dyn MacResult+'cx>;
 
 impl<F> IdentMacroExpander for F
-    where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, ast::Ident,
+    where F : for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, ast::Ident,
                           Vec<tokenstream::TokenTree>) -> Box<dyn MacResult+'cx>
 {
     fn expand<'cx>(&self,
-                   cx: &'cx mut ExtCtxt,
+                   cx: &'cx mut ExtCtxt<'_>,
                    sp: Span,
                    ident: ast::Ident,
                    token_tree: Vec<tokenstream::TokenTree>)
@@ -567,7 +568,7 @@ impl MacResult for DummyResult {
 }
 
 pub type BuiltinDeriveFn =
-    for<'cx> fn(&'cx mut ExtCtxt, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable));
+    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable));
 
 /// Represents different kinds of macro invocations that can be resolved.
 #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -979,7 +980,7 @@ impl<'a> ExtCtxt<'a> {
 /// emitting `err_msg` if `expr` is not a string literal. This does not stop
 /// compilation on error, merely emits a non-fatal error and returns None.
 pub fn expr_to_spanned_string<'a>(
-    cx: &'a mut ExtCtxt,
+    cx: &'a mut ExtCtxt<'_>,
     mut expr: P<ast::Expr>,
     err_msg: &str,
 ) -> Result<Spanned<(Symbol, ast::StrStyle)>, Option<DiagnosticBuilder<'a>>> {
@@ -998,7 +999,7 @@ pub fn expr_to_spanned_string<'a>(
     })
 }
 
-pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
+pub fn expr_to_string(cx: &mut ExtCtxt<'_>, expr: P<ast::Expr>, err_msg: &str)
                       -> Option<(Symbol, ast::StrStyle)> {
     expr_to_spanned_string(cx, expr, err_msg)
         .map_err(|err| err.map(|mut err| err.emit()))
@@ -1011,7 +1012,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
 /// compilation should call
 /// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be
 /// done as rarely as possible).
-pub fn check_zero_tts(cx: &ExtCtxt,
+pub fn check_zero_tts(cx: &ExtCtxt<'_>,
                       sp: Span,
                       tts: &[tokenstream::TokenTree],
                       name: &str) {
@@ -1022,7 +1023,7 @@ pub fn check_zero_tts(cx: &ExtCtxt,
 
 /// Interpreting `tts` as a comma-separated sequence of expressions,
 /// expect exactly one string literal, or emit an error and return None.
-pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
+pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
                                sp: Span,
                                tts: &[tokenstream::TokenTree],
                                name: &str)
@@ -1045,7 +1046,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
 
 /// Extract comma-separated expressions from `tts`. If there is a
 /// parsing error, emit a non-fatal error and return None.
-pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
+pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
                           sp: Span,
                           tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
     let mut p = cx.new_parser_from_tts(tts);
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index a8eec1a74dd..1e83f6c03ec 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -1,17 +1,18 @@
+use crate::ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind};
+use crate::attr;
+use crate::source_map::{dummy_spanned, respan, Spanned};
+use crate::ext::base::ExtCtxt;
+use crate::ptr::P;
+use crate::symbol::{Symbol, keywords};
+use crate::ThinVec;
+
 use rustc_target::spec::abi::Abi;
-use ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind};
-use attr;
 use syntax_pos::{Pos, Span, DUMMY_SP};
-use source_map::{dummy_spanned, respan, Spanned};
-use ext::base::ExtCtxt;
-use ptr::P;
-use symbol::{Symbol, keywords};
-use ThinVec;
 
 // Transitional re-exports so qquote can find the paths it is looking for
 mod syntax {
-    pub use ext;
-    pub use parse;
+    pub use crate::ext;
+    pub use crate::parse;
 }
 
 pub trait AstBuilder {
diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs
index fa8cf6c496a..50cec9e7908 100644
--- a/src/libsyntax/ext/derive.rs
+++ b/src/libsyntax/ext/derive.rs
@@ -1,15 +1,16 @@
-use attr::HasAttrs;
-use ast;
-use source_map::{hygiene, ExpnInfo, ExpnFormat};
-use ext::base::ExtCtxt;
-use ext::build::AstBuilder;
-use parse::parser::PathStyle;
-use symbol::Symbol;
+use crate::attr::HasAttrs;
+use crate::ast;
+use crate::source_map::{hygiene, ExpnInfo, ExpnFormat};
+use crate::ext::base::ExtCtxt;
+use crate::ext::build::AstBuilder;
+use crate::parse::parser::PathStyle;
+use crate::symbol::Symbol;
+
 use syntax_pos::Span;
 
 use rustc_data_structures::fx::FxHashSet;
 
-pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
+pub fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
     let mut result = Vec::new();
     attrs.retain(|attr| {
         if attr.path != "derive" {
@@ -40,7 +41,7 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec
     result
 }
 
-pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: &mut T)
+pub fn add_derived_markers<T>(cx: &mut ExtCtxt<'_>, span: Span, traits: &[ast::Path], item: &mut T)
     where T: HasAttrs,
 {
     let (mut names, mut pretty_name) = (FxHashSet::default(), "derive(".to_owned());
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index a0ccce98659..89d59478a5d 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -1,27 +1,28 @@
-use ast::{self, Block, Ident, LitKind, NodeId, PatKind, Path};
-use ast::{MacStmtStyle, StmtKind, ItemKind};
-use attr::{self, HasAttrs};
-use source_map::{ExpnInfo, MacroBang, MacroAttribute, dummy_spanned, respan};
-use config::StripUnconfigured;
-use errors::{Applicability, FatalError};
-use ext::base::*;
-use ext::derive::{add_derived_markers, collect_derives};
-use ext::hygiene::{self, Mark, SyntaxContext};
-use ext::placeholders::{placeholder, PlaceholderExpander};
-use feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
-use mut_visit::*;
-use parse::{DirectoryOwnership, PResult, ParseSess};
-use parse::token::{self, Token};
-use parse::parser::Parser;
-use ptr::P;
-use smallvec::SmallVec;
-use symbol::Symbol;
-use symbol::keywords;
+use crate::ast::{self, Block, Ident, LitKind, NodeId, PatKind, Path};
+use crate::ast::{MacStmtStyle, StmtKind, ItemKind};
+use crate::attr::{self, HasAttrs};
+use crate::source_map::{ExpnInfo, MacroBang, MacroAttribute, dummy_spanned, respan};
+use crate::config::StripUnconfigured;
+use crate::errors::{Applicability, FatalError};
+use crate::ext::base::*;
+use crate::ext::derive::{add_derived_markers, collect_derives};
+use crate::ext::hygiene::{self, Mark, SyntaxContext};
+use crate::ext::placeholders::{placeholder, PlaceholderExpander};
+use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
+use crate::mut_visit::*;
+use crate::parse::{DirectoryOwnership, PResult, ParseSess};
+use crate::parse::token::{self, Token};
+use crate::parse::parser::Parser;
+use crate::ptr::P;
+use crate::symbol::Symbol;
+use crate::symbol::keywords;
+use crate::tokenstream::{TokenStream, TokenTree};
+use crate::visit::{self, Visitor};
+use crate::util::map_in_place::MapInPlace;
+
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::{Span, DUMMY_SP, FileName};
 use syntax_pos::hygiene::ExpnFormat;
-use tokenstream::{TokenStream, TokenTree};
-use visit::{self, Visitor};
-use util::map_in_place::MapInPlace;
 
 use rustc_data_structures::fx::FxHashMap;
 use std::fs;
@@ -129,8 +130,8 @@ macro_rules! ast_fragments {
             })*)*
         }
 
-        impl<'a> MacResult for ::ext::tt::macro_rules::ParserAnyMacro<'a> {
-            $(fn $make_ast(self: Box<::ext::tt::macro_rules::ParserAnyMacro<'a>>)
+        impl<'a> MacResult for crate::ext::tt::macro_rules::ParserAnyMacro<'a> {
+            $(fn $make_ast(self: Box<crate::ext::tt::macro_rules::ParserAnyMacro<'a>>)
                            -> Option<$AstTy> {
                 Some(self.make(AstFragmentKind::$Kind).$make_ast())
             })*
diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs
index 23b34c2660b..3e60dd81a3b 100644
--- a/src/libsyntax/ext/placeholders.rs
+++ b/src/libsyntax/ext/placeholders.rs
@@ -1,14 +1,15 @@
-use ast::{self, NodeId};
-use source_map::{DUMMY_SP, dummy_spanned};
-use ext::base::ExtCtxt;
-use ext::expand::{AstFragment, AstFragmentKind};
-use ext::hygiene::Mark;
-use tokenstream::TokenStream;
-use mut_visit::*;
-use ptr::P;
-use smallvec::SmallVec;
-use symbol::keywords;
-use ThinVec;
+use crate::ast::{self, NodeId};
+use crate::source_map::{DUMMY_SP, dummy_spanned};
+use crate::ext::base::ExtCtxt;
+use crate::ext::expand::{AstFragment, AstFragmentKind};
+use crate::ext::hygiene::Mark;
+use crate::tokenstream::TokenStream;
+use crate::mut_visit::*;
+use crate::ptr::P;
+use crate::symbol::keywords;
+use crate::ThinVec;
+
+use smallvec::{smallvec, SmallVec};
 
 use rustc_data_structures::fx::FxHashMap;
 
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index e63042a4208..31a134b856d 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -1,15 +1,14 @@
-use ast;
-use syntax_pos::{self, Pos, Span, FileName};
-use ext::base::*;
-use ext::base;
-use ext::build::AstBuilder;
-use parse::{token, DirectoryOwnership};
-use parse;
-use print::pprust;
-use ptr::P;
+use crate::ast;
+use crate::ext::base::{self, *};
+use crate::ext::build::AstBuilder;
+use crate::parse::{self, token, DirectoryOwnership};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::symbol::Symbol;
+use crate::tokenstream;
+
 use smallvec::SmallVec;
-use symbol::Symbol;
-use tokenstream;
+use syntax_pos::{self, Pos, Span, FileName};
 
 use std::fs;
 use std::io::ErrorKind;
@@ -21,7 +20,7 @@ use rustc_data_structures::sync::Lrc;
 // a given file into the current one.
 
 /// line!(): expands to the current line number
-pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                    -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "line!");
 
@@ -32,7 +31,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 }
 
 /* column!(): expands to the current column number */
-pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                   -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "column!");
 
@@ -43,7 +42,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 }
 
 /* __rust_unstable_column!(): expands to the current column number */
-pub fn expand_column_gated(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_column_gated(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                   -> Box<dyn base::MacResult+'static> {
     if sp.allows_unstable() {
         expand_column(cx, sp, tts)
@@ -55,7 +54,7 @@ pub fn expand_column_gated(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Token
 /// file!(): expands to the current filename
 /// The source_file (`loc.file`) contains a bunch more information we could spit
 /// out if we wanted.
-pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                    -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "file!");
 
@@ -64,13 +63,13 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
     base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
 }
 
-pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_stringify(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                         -> Box<dyn base::MacResult+'static> {
     let s = pprust::tts_to_string(tts);
     base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
 }
 
-pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                   -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "module_path!");
     let mod_path = &cx.current_expansion.module.mod_path;
@@ -82,7 +81,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 /// include! : parse the given file as an expr
 /// This is generally a bad idea because it's going to behave
 /// unhygienically.
-pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                            -> Box<dyn base::MacResult+'cx> {
     let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
         Some(f) => f,
@@ -120,7 +119,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::T
 }
 
 // include_str! : read the given file, insert it as a literal string expr
-pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                           -> Box<dyn base::MacResult+'static> {
     let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
         Some(f) => f,
@@ -148,7 +147,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
     }
 }
 
-pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
                             -> Box<dyn base::MacResult+'static> {
     let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
         Some(f) => f,
@@ -178,7 +177,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
 
 // resolve a file-system path to an absolute file-system path (if it
 // isn't already)
-fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: String) -> PathBuf {
+fn res_rel_file(cx: &mut ExtCtxt<'_>, sp: syntax_pos::Span, arg: String) -> PathBuf {
     let arg = PathBuf::from(arg);
     // Relative paths are resolved relative to the file in which they are found
     // after macro expansion (that is, they are unhygienic).
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index b4003ac729a..a9000b89fb4 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -70,21 +70,22 @@
 //! eof: [a $( a )* a b ·]
 //! ```
 
-pub use self::NamedMatch::*;
-pub use self::ParseResult::*;
-use self::TokenTreeOrTokenTreeSlice::*;
-
-use ast::Ident;
+pub use NamedMatch::*;
+pub use ParseResult::*;
+use TokenTreeOrTokenTreeSlice::*;
+
+use crate::ast::Ident;
+use crate::errors::FatalError;
+use crate::ext::tt::quoted::{self, TokenTree};
+use crate::parse::{Directory, ParseSess};
+use crate::parse::parser::{Parser, PathStyle};
+use crate::parse::token::{self, DocComment, Nonterminal, Token};
+use crate::print::pprust;
+use crate::symbol::keywords;
+use crate::tokenstream::{DelimSpan, TokenStream};
+
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::{self, Span};
-use errors::FatalError;
-use ext::tt::quoted::{self, TokenTree};
-use parse::{Directory, ParseSess};
-use parse::parser::{Parser, PathStyle};
-use parse::token::{self, DocComment, Nonterminal, Token};
-use print::pprust;
-use smallvec::SmallVec;
-use symbol::keywords;
-use tokenstream::{DelimSpan, TokenStream};
 
 use rustc_data_structures::fx::FxHashMap;
 use std::collections::hash_map::Entry::{Occupied, Vacant};
@@ -649,7 +650,7 @@ pub fn parse(
     sess: &ParseSess,
     tts: TokenStream,
     ms: &[TokenTree],
-    directory: Option<Directory>,
+    directory: Option<Directory<'_>>,
     recurse_into_modules: bool,
 ) -> NamedParseResult {
     // Create a parser that can be used for the "black box" parts.
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 176575b67ea..b3ecaeaedbb 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -1,29 +1,31 @@
-use {ast, attr};
+use crate::{ast, attr};
+use crate::edition::Edition;
+use crate::errors::FatalError;
+use crate::ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
+use crate::ext::base::{NormalTT, TTMacroExpander};
+use crate::ext::expand::{AstFragment, AstFragmentKind};
+use crate::ext::tt::macro_parser::{Success, Error, Failure};
+use crate::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
+use crate::ext::tt::macro_parser::{parse, parse_failure_msg};
+use crate::ext::tt::quoted;
+use crate::ext::tt::transcribe::transcribe;
+use crate::feature_gate::Features;
+use crate::parse::{Directory, ParseSess};
+use crate::parse::parser::Parser;
+use crate::parse::token::{self, NtTT};
+use crate::parse::token::Token::*;
+use crate::symbol::Symbol;
+use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
+
 use syntax_pos::{Span, DUMMY_SP};
-use edition::Edition;
-use errors::FatalError;
-use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
-use ext::base::{NormalTT, TTMacroExpander};
-use ext::expand::{AstFragment, AstFragmentKind};
-use ext::tt::macro_parser::{Success, Error, Failure};
-use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
-use ext::tt::macro_parser::{parse, parse_failure_msg};
-use ext::tt::quoted;
-use ext::tt::transcribe::transcribe;
-use feature_gate::Features;
-use parse::{Directory, ParseSess};
-use parse::parser::Parser;
-use parse::token::{self, NtTT};
-use parse::token::Token::*;
-use symbol::Symbol;
-use tokenstream::{DelimSpan, TokenStream, TokenTree};
+use log::debug;
 
 use rustc_data_structures::fx::FxHashMap;
 use std::borrow::Cow;
 use std::collections::hash_map::Entry;
 
 use rustc_data_structures::sync::Lrc;
-use errors::Applicability;
+use crate::errors::Applicability;
 
 const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
     `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, \
@@ -91,7 +93,7 @@ struct MacroRulesMacroExpander {
 impl TTMacroExpander for MacroRulesMacroExpander {
     fn expand<'cx>(
         &self,
-        cx: &'cx mut ExtCtxt,
+        cx: &'cx mut ExtCtxt<'_>,
         sp: Span,
         input: TokenStream,
         def_span: Option<Span>,
@@ -109,13 +111,13 @@ impl TTMacroExpander for MacroRulesMacroExpander {
     }
 }
 
-fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
+fn trace_macros_note(cx: &mut ExtCtxt<'_>, sp: Span, message: String) {
     let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
     cx.expansions.entry(sp).or_default().push(message);
 }
 
 /// Given `lhses` and `rhses`, this is the new macro we create
-fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
+fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
                           sp: Span,
                           def_span: Option<Span>,
                           name: ast::Ident,
@@ -423,7 +425,7 @@ fn check_lhs_nt_follows(sess: &ParseSess,
 /// Check that the lhs contains no repetition which could match an empty token
 /// tree, because then the matcher would hang indefinitely.
 fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
-    use self::quoted::TokenTree;
+    use quoted::TokenTree;
     for tt in tts {
         match *tt {
             TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
@@ -497,7 +499,7 @@ struct FirstSets {
 
 impl FirstSets {
     fn new(tts: &[quoted::TokenTree]) -> FirstSets {
-        use self::quoted::TokenTree;
+        use quoted::TokenTree;
 
         let mut sets = FirstSets { first: FxHashMap::default() };
         build_recur(&mut sets, tts);
@@ -567,7 +569,7 @@ impl FirstSets {
     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
     fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
-        use self::quoted::TokenTree;
+        use quoted::TokenTree;
 
         let mut first = TokenSet::empty();
         for tt in tts.iter() {
@@ -721,7 +723,7 @@ fn check_matcher_core(sess: &ParseSess,
                       first_sets: &FirstSets,
                       matcher: &[quoted::TokenTree],
                       follow: &TokenSet) -> TokenSet {
-    use self::quoted::TokenTree;
+    use quoted::TokenTree;
 
     let mut last = TokenSet::empty();
 
@@ -940,7 +942,7 @@ enum IsInFollow {
 /// separator.
 // when changing this do not forget to update doc/book/macros.md!
 fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
-    use self::quoted::TokenTree;
+    use quoted::TokenTree;
 
     if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
         // closing a token tree can never be matched by any fragment;
@@ -1072,7 +1074,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess,
 
 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
+        quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index b56871a1885..6c3cf3e6312 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -1,13 +1,14 @@
-use ast::NodeId;
-use early_buffered_lints::BufferedEarlyLintId;
-use ext::tt::macro_parser;
-use feature_gate::Features;
-use parse::{token, ParseSess};
-use print::pprust;
-use symbol::keywords;
+use crate::ast::NodeId;
+use crate::early_buffered_lints::BufferedEarlyLintId;
+use crate::ext::tt::macro_parser;
+use crate::feature_gate::Features;
+use crate::parse::{token, ParseSess};
+use crate::print::pprust;
+use crate::tokenstream::{self, DelimSpan};
+use crate::ast;
+use crate::symbol::keywords;
+
 use syntax_pos::{edition::Edition, BytePos, Span};
-use tokenstream::{self, DelimSpan};
-use ast;
 
 use rustc_data_structures::sync::Lrc;
 use std::iter::Peekable;
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 08f34b22328..b9a50cc6488 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -1,13 +1,14 @@
-use ast::Ident;
-use ext::base::ExtCtxt;
-use ext::expand::Marker;
-use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
-use ext::tt::quoted;
-use mut_visit::noop_visit_tt;
-use parse::token::{self, Token, NtTT};
-use smallvec::SmallVec;
+use crate::ast::Ident;
+use crate::ext::base::ExtCtxt;
+use crate::ext::expand::Marker;
+use crate::ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
+use crate::ext::tt::quoted;
+use crate::mut_visit::noop_visit_tt;
+use crate::parse::token::{self, Token, NtTT};
+use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
+
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::DUMMY_SP;
-use tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -56,7 +57,7 @@ impl Iterator for Frame {
 /// This can do Macro-By-Example transcription. On the other hand, if
 /// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can
 /// (and should) be None.
-pub fn transcribe(cx: &ExtCtxt,
+pub fn transcribe(cx: &ExtCtxt<'_>,
                   interp: Option<FxHashMap<Ident, Rc<NamedMatch>>>,
                   src: Vec<quoted::TokenTree>)
                   -> TokenStream {
@@ -230,7 +231,7 @@ fn lockstep_iter_size(tree: &quoted::TokenTree,
                       interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
                       repeats: &[(usize, usize)])
                       -> LockstepIterSize {
-    use self::quoted::TokenTree;
+    use quoted::TokenTree;
     match *tree {
         TokenTree::Delimited(_, ref delimed) => {
             delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index bf4c637045f..6c780369a0a 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -12,21 +12,23 @@
 //! gate usage is added, *do not remove it again* even once the feature
 //! becomes stable.
 
-use self::AttributeType::*;
-use self::AttributeGate::*;
+use AttributeType::*;
+use AttributeGate::*;
+
+use crate::ast::{self, NodeId, PatKind, RangeEnd};
+use crate::attr;
+use crate::early_buffered_lints::BufferedEarlyLintId;
+use crate::source_map::Spanned;
+use crate::edition::{ALL_EDITIONS, Edition};
+use crate::errors::{DiagnosticBuilder, Handler};
+use crate::visit::{self, FnKind, Visitor};
+use crate::parse::ParseSess;
+use crate::symbol::Symbol;
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_target::spec::abi::Abi;
-use ast::{self, NodeId, PatKind, RangeEnd};
-use attr;
-use early_buffered_lints::BufferedEarlyLintId;
-use source_map::Spanned;
-use edition::{ALL_EDITIONS, Edition};
 use syntax_pos::{Span, DUMMY_SP};
-use errors::{DiagnosticBuilder, Handler};
-use visit::{self, FnKind, Visitor};
-use parse::ParseSess;
-use symbol::Symbol;
+use log::debug;
 
 use std::env;
 
@@ -778,8 +780,8 @@ pub enum Stability {
 }
 
 // fn() is not Debug
-impl ::std::fmt::Debug for AttributeGate {
-    fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
+impl std::fmt::Debug for AttributeGate {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match *self {
             Gated(ref stab, name, expl, _) =>
                 write!(fmt, "Gated({:?}, {}, {})", stab, name, expl),
diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs
index cf11ac550b7..2953b35298e 100644
--- a/src/libsyntax/json.rs
+++ b/src/libsyntax/json.rs
@@ -9,13 +9,13 @@
 
 // FIXME: spec the JSON output properly.
 
-use source_map::{SourceMap, FilePathMapping};
-use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
-use errors::registry::Registry;
-use errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, SourceMapper};
-use errors::{DiagnosticId, Applicability};
-use errors::emitter::{Emitter, EmitterWriter};
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::registry::Registry;
+use crate::errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, SourceMapper};
+use crate::errors::{DiagnosticId, Applicability};
+use crate::errors::emitter::{Emitter, EmitterWriter};
 
+use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
 use rustc_data_structures::sync::{self, Lrc};
 use std::io::{self, Write};
 use std::vec;
@@ -69,7 +69,7 @@ impl JsonEmitter {
 }
 
 impl Emitter for JsonEmitter {
-    fn emit(&mut self, db: &DiagnosticBuilder) {
+    fn emit(&mut self, db: &DiagnosticBuilder<'_>) {
         let data = Diagnostic::from_diagnostic_builder(db, self);
         let result = if self.pretty {
             writeln!(&mut self.dst, "{}", as_pretty_json(&data))
@@ -159,7 +159,7 @@ struct DiagnosticCode {
 }
 
 impl Diagnostic {
-    fn from_diagnostic_builder(db: &DiagnosticBuilder,
+    fn from_diagnostic_builder(db: &DiagnosticBuilder<'_>,
                                je: &JsonEmitter)
                                -> Diagnostic {
         let sugg = db.suggestions.iter().map(|sugg| {
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index f2b8f23ee85..c04391b34ee 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -9,9 +9,10 @@
        html_root_url = "https://doc.rust-lang.org/nightly/",
        test(attr(deny(warnings))))]
 
+#![deny(rust_2018_idioms)]
+
 #![feature(crate_visibility_modifier)]
 #![feature(label_break_value)]
-#![feature(nll)]
 #![feature(rustc_attrs)]
 #![feature(rustc_diagnostic_macros)]
 #![feature(slice_sort_by_cached_key)]
@@ -22,20 +23,10 @@
 
 #![recursion_limit="256"]
 
-#[macro_use] extern crate bitflags;
-extern crate core;
-extern crate serialize;
-#[macro_use] extern crate log;
-pub extern crate rustc_errors as errors;
-extern crate syntax_pos;
-#[macro_use] extern crate rustc_data_structures;
-extern crate rustc_target;
-#[macro_use] extern crate scoped_tls;
-#[macro_use]
-extern crate smallvec;
-
+#[allow(unused_extern_crates)]
 extern crate serialize as rustc_serialize; // used by deriving
 
+pub use rustc_errors as errors;
 use rustc_data_structures::sync::Lock;
 use rustc_data_structures::bit_set::GrowableBitSet;
 pub use rustc_data_structures::thin_vec::ThinVec;
@@ -48,7 +39,7 @@ use ast::AttrId;
 macro_rules! panictry {
     ($e:expr) => ({
         use std::result::Result::{Ok, Err};
-        use errors::FatalError;
+        use crate::errors::FatalError;
         match $e {
             Ok(e) => e,
             Err(mut e) => {
@@ -63,7 +54,7 @@ macro_rules! panictry {
 macro_rules! panictry_buffer {
     ($handler:expr, $e:expr) => ({
         use std::result::Result::{Ok, Err};
-        use errors::{FatalError, DiagnosticBuilder};
+        use crate::errors::{FatalError, DiagnosticBuilder};
         match $e {
             Ok(e) => e,
             Err(errs) => {
@@ -113,7 +104,7 @@ pub fn with_globals<F, R>(f: F) -> R
     })
 }
 
-scoped_thread_local!(pub static GLOBALS: Globals);
+scoped_tls::scoped_thread_local!(pub static GLOBALS: Globals);
 
 #[macro_use]
 pub mod diagnostics {
@@ -139,9 +130,9 @@ pub mod util {
 pub mod json;
 
 pub mod syntax {
-    pub use ext;
-    pub use parse;
-    pub use ast;
+    pub use crate::ext;
+    pub use crate::parse;
+    pub use crate::ast;
 }
 
 pub mod ast;
diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs
index 93fedb73d27..0fd8bbf100f 100644
--- a/src/libsyntax/mut_visit.rs
+++ b/src/libsyntax/mut_visit.rs
@@ -7,19 +7,20 @@
 //! a MutVisitor renaming item names in a module will miss all of those
 //! that are created by the expansion of a macro.
 
-use ast::*;
+use crate::ast::*;
+use crate::source_map::{Spanned, respan};
+use crate::parse::token::{self, Token};
+use crate::ptr::P;
+use crate::symbol::keywords;
+use crate::ThinVec;
+use crate::tokenstream::*;
+use crate::util::map_in_place::MapInPlace;
+
+use smallvec::{smallvec, Array, SmallVec};
 use syntax_pos::Span;
-use source_map::{Spanned, respan};
-use parse::token::{self, Token};
-use ptr::P;
-use smallvec::{Array, SmallVec};
-use std::ops::DerefMut;
-use symbol::keywords;
-use ThinVec;
-use tokenstream::*;
-use util::map_in_place::MapInPlace;
 
 use rustc_data_structures::sync::Lrc;
+use std::ops::DerefMut;
 
 pub trait ExpectOne<A: Array> {
     fn expect_one(self, err: &'static str) -> A::Item;
@@ -1256,15 +1257,15 @@ pub fn noop_visit_vis<T: MutVisitor>(Spanned { node, span }: &mut Visibility, vi
 #[cfg(test)]
 mod tests {
     use std::io;
-    use ast::{self, Ident};
-    use util::parser_testing::{string_to_crate, matches_codepattern};
-    use print::pprust;
-    use mut_visit;
-    use with_globals;
+    use crate::ast::{self, Ident};
+    use crate::util::parser_testing::{string_to_crate, matches_codepattern};
+    use crate::print::pprust;
+    use crate::mut_visit;
+    use crate::with_globals;
     use super::*;
 
     // this version doesn't care about getting comments or docstrings in.
-    fn fake_print_crate(s: &mut pprust::State,
+    fn fake_print_crate(s: &mut pprust::State<'_>,
                         krate: &ast::Crate) -> io::Result<()> {
         s.print_mod(&krate.module, &krate.attrs)
     }
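
The reordered imports above also show how macro imports change in 2018: instead of `#[macro_use] extern crate smallvec;`, the `smallvec!` macro is named in an ordinary `use`. A small sketch of that pattern, assuming a `smallvec` dependency; the values are invented:

use smallvec::{smallvec, SmallVec};

fn main() {
    // `smallvec!` is just another imported item; no crate-wide attribute needed.
    let v: SmallVec<[u32; 4]> = smallvec![1, 2, 3];
    assert_eq!(v.len(), 3);
}
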
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 914a0667ebf..b36ca0574cb 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -1,10 +1,12 @@
-use attr;
-use ast;
-use source_map::respan;
-use parse::{SeqSep, PResult};
-use parse::token::{self, Nonterminal, DelimToken};
-use parse::parser::{Parser, TokenType, PathStyle};
-use tokenstream::{TokenStream, TokenTree};
+use crate::attr;
+use crate::ast;
+use crate::source_map::respan;
+use crate::parse::{SeqSep, PResult};
+use crate::parse::token::{self, Nonterminal, DelimToken};
+use crate::parse::parser::{Parser, TokenType, PathStyle};
+use crate::tokenstream::{TokenStream, TokenTree};
+
+use log::debug;
 
 #[derive(Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -74,7 +76,7 @@ impl<'a> Parser<'a> {
     /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy`
     /// that prescribes how to handle inner attributes.
     fn parse_attribute_with_inner_parse_policy(&mut self,
-                                               inner_parse_policy: InnerAttributeParsePolicy)
+                                               inner_parse_policy: InnerAttributeParsePolicy<'_>)
                                                -> PResult<'a, ast::Attribute> {
         debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
                inner_parse_policy,
diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs
index a1cdfd9146a..b4103440e35 100644
--- a/src/libsyntax/parse/classify.rs
+++ b/src/libsyntax/parse/classify.rs
@@ -2,7 +2,7 @@
 
 // Predicates on exprs and stmts that the pretty-printer and parser use
 
-use ast;
+use crate::ast;
 
 /// Does this expression require a semicolon to be treated
 /// as a statement? The negation of this: 'can this expression
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index ffc480d829d..4632d814d5c 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -1,11 +1,13 @@
-pub use self::CommentStyle::*;
+pub use CommentStyle::*;
+
+use crate::ast;
+use crate::source_map::SourceMap;
+use crate::parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
+use crate::parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
+use crate::print::pprust;
 
-use ast;
-use source_map::SourceMap;
 use syntax_pos::{BytePos, CharPos, Pos, FileName};
-use parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
-use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
-use print::pprust;
+use log::debug;
 
 use std::io::Read;
 use std::usize;
@@ -135,7 +137,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
     panic!("not a doc-comment: {}", comment);
 }
 
-fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
+fn push_blank_line_comment(rdr: &StringReader<'_>, comments: &mut Vec<Comment>) {
     debug!(">>> blank-line comment");
     comments.push(Comment {
         style: BlankLine,
@@ -144,7 +146,10 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
     });
 }
 
-fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mut Vec<Comment>) {
+fn consume_whitespace_counting_blank_lines(
+    rdr: &mut StringReader<'_>,
+    comments: &mut Vec<Comment>
+) {
     while is_pattern_whitespace(rdr.ch) && !rdr.is_eof() {
         if rdr.ch_is('\n') {
             push_blank_line_comment(rdr, &mut *comments);
@@ -153,7 +158,7 @@ fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mu
     }
 }
 
-fn read_shebang_comment(rdr: &mut StringReader,
+fn read_shebang_comment(rdr: &mut StringReader<'_>,
                         code_to_the_left: bool,
                         comments: &mut Vec<Comment>) {
     debug!(">>> shebang comment");
@@ -166,7 +171,7 @@ fn read_shebang_comment(rdr: &mut StringReader,
     });
 }
 
-fn read_line_comments(rdr: &mut StringReader,
+fn read_line_comments(rdr: &mut StringReader<'_>,
                       code_to_the_left: bool,
                       comments: &mut Vec<Comment>) {
     debug!(">>> line comments");
@@ -222,7 +227,7 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String>, s: String, col:
     lines.push(s1);
 }
 
-fn read_block_comment(rdr: &mut StringReader,
+fn read_block_comment(rdr: &mut StringReader<'_>,
                       code_to_the_left: bool,
                       comments: &mut Vec<Comment>) {
     debug!(">>> block comment");
@@ -312,7 +317,7 @@ fn read_block_comment(rdr: &mut StringReader,
 }
 
 
-fn consume_comment(rdr: &mut StringReader,
+fn consume_comment(rdr: &mut StringReader<'_>,
                    comments: &mut Vec<Comment>,
                    code_to_the_left: &mut bool,
                    anything_to_the_left: &mut bool) {
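
Most of the signature churn in this file comes from one lint: `#![deny(rust_2018_idioms)]` turns on `elided_lifetimes_in_paths`, so a type that borrows must show its lifetime, at least as `'_`. A standalone sketch of the rule, with an invented `Reader` type standing in for `StringReader`:

#![deny(rust_2018_idioms)]

struct Reader<'a> {
    src: &'a str,
}

// `Reader<'_>` makes the hidden borrow visible; a bare `&Reader` would be
// rejected by `elided_lifetimes_in_paths` under the deny above.
fn remaining(rdr: &Reader<'_>) -> usize {
    rdr.src.len()
}

fn main() {
    let r = Reader { src: "// a comment" };
    println!("{} bytes left", remaining(&r));
}
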
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 06f9162a400..2e3233c8ed8 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1,9 +1,10 @@
-use ast::{self, Ident};
+use crate::ast::{self, Ident};
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
+use crate::parse::{token, ParseSess};
+use crate::symbol::{Symbol, keywords};
+
 use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
-use source_map::{SourceMap, FilePathMapping};
-use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
-use parse::{token, ParseSess};
-use symbol::{Symbol, keywords};
 use core::unicode::property::Pattern_White_Space;
 
 use std::borrow::Cow;
@@ -11,6 +12,7 @@ use std::char;
 use std::iter;
 use std::mem::replace;
 use rustc_data_structures::sync::Lrc;
+use log::debug;
 
 pub mod comments;
 mod tokentrees;
@@ -449,7 +451,7 @@ impl<'a> StringReader<'a> {
         }
         return s.into();
 
-        fn translate_crlf_(rdr: &StringReader,
+        fn translate_crlf_(rdr: &StringReader<'_>,
                            start: BytePos,
                            s: &str,
                            mut j: usize,
@@ -1866,19 +1868,20 @@ fn char_at(s: &str, byte: usize) -> char {
 mod tests {
     use super::*;
 
-    use ast::{Ident, CrateConfig};
-    use symbol::Symbol;
-    use syntax_pos::{BytePos, Span, NO_EXPANSION};
-    use source_map::SourceMap;
-    use errors;
-    use feature_gate::UnstableFeatures;
-    use parse::token;
+    use crate::ast::{Ident, CrateConfig};
+    use crate::symbol::Symbol;
+    use crate::source_map::SourceMap;
+    use crate::errors;
+    use crate::feature_gate::UnstableFeatures;
+    use crate::parse::token;
+    use crate::diagnostics::plugin::ErrorMap;
+    use crate::with_globals;
     use std::io;
     use std::path::PathBuf;
-    use diagnostics::plugin::ErrorMap;
+    use syntax_pos::{BytePos, Span, NO_EXPANSION};
     use rustc_data_structures::fx::FxHashSet;
     use rustc_data_structures::sync::Lock;
-    use with_globals;
+
     fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
         let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
                                                           Some(sm.clone()),
@@ -1943,7 +1946,7 @@ mod tests {
 
     // check that the given reader produces the desired stream
     // of tokens (stop checking after exhausting the expected vec)
-    fn check_tokenization(mut string_reader: StringReader, expected: Vec<token::Token>) {
+    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<token::Token>) {
         for expected_tok in &expected {
             assert_eq!(&string_reader.next_token().tok, expected_tok);
         }
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index d219f29f06c..7699d9eab22 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,7 +1,7 @@
-use print::pprust::token_to_string;
-use parse::lexer::StringReader;
-use parse::{token, PResult};
-use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
+use crate::print::pprust::token_to_string;
+use crate::parse::lexer::StringReader;
+use crate::parse::{token, PResult};
+use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs
index 7da4284c0e4..75862178169 100644
--- a/src/libsyntax/parse/lexer/unicode_chars.rs
+++ b/src/libsyntax/parse/lexer/unicode_chars.rs
@@ -2,7 +2,7 @@
 // http://www.unicode.org/Public/security/10.0.0/confusables.txt
 
 use syntax_pos::{Span, NO_EXPANSION};
-use errors::{Applicability, DiagnosticBuilder};
+use crate::errors::{Applicability, DiagnosticBuilder};
 use super::StringReader;
 
 const UNICODE_ARRAY: &[(char, &str, char)] = &[
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index c7330004d6d..c723d591f2f 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -1,16 +1,18 @@
 //! The main parser interface
 
+use crate::ast::{self, CrateConfig, NodeId};
+use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
+use crate::feature_gate::UnstableFeatures;
+use crate::parse::parser::Parser;
+use crate::symbol::Symbol;
+use crate::tokenstream::{TokenStream, TokenTree};
+use crate::diagnostics::plugin::ErrorMap;
+
 use rustc_data_structures::sync::{Lrc, Lock};
-use ast::{self, CrateConfig, NodeId};
-use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
-use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
-use feature_gate::UnstableFeatures;
-use parse::parser::Parser;
-use symbol::Symbol;
-use tokenstream::{TokenStream, TokenTree};
-use diagnostics::plugin::ErrorMap;
+use log::debug;
 
 use rustc_data_structures::fx::FxHashSet;
 use std::borrow::Cow;
@@ -125,12 +127,12 @@ pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess)
 }
 
 pub fn parse_crate_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                       -> PResult<ast::Crate> {
+                                       -> PResult<'_, ast::Crate> {
     new_parser_from_source_str(sess, name, source).parse_crate_mod()
 }
 
 pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                             -> PResult<Vec<ast::Attribute>> {
+                                             -> PResult<'_, Vec<ast::Attribute>> {
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
@@ -142,14 +144,14 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
 
 /// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-                                      -> Parser {
+                                      -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
 }
 
 /// Create a new parser from a source string. Returns any buffered errors from lexing the initial
 /// token stream.
 pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-    -> Result<Parser, Vec<Diagnostic>>
+    -> Result<Parser<'_>, Vec<Diagnostic>>
 {
     let mut parser = maybe_source_file_to_parser(sess,
                                                  sess.source_map().new_source_file(name, source))?;
@@ -186,7 +188,7 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 }
 
 /// Given a source_file and config, return a parser
-fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
+fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic,
                      maybe_source_file_to_parser(sess, source_file))
 }
@@ -194,7 +196,7 @@ fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Par
 /// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
 /// initial token stream.
 fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
-    -> Result<Parser, Vec<Diagnostic>>
+    -> Result<Parser<'_>, Vec<Diagnostic>>
 {
     let end_pos = source_file.end_pos;
     let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
@@ -208,7 +210,7 @@ fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
+pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
     stream_to_parser(sess, tts.into_iter().collect())
 }
 
@@ -270,7 +272,7 @@ pub fn maybe_file_to_stream(sess: &ParseSess,
 }
 
 /// Given stream and the `ParseSess`, produce a parser
-pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
+pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
     Parser::new(sess, stream, None, true, false)
 }
 
@@ -758,22 +760,22 @@ impl SeqSep {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use crate::ast::{self, Ident, PatKind};
+    use crate::attr::first_attr_value_str_by_name;
+    use crate::ptr::P;
+    use crate::print::pprust::item_to_string;
+    use crate::tokenstream::{DelimSpan, TokenTree};
+    use crate::util::parser_testing::string_to_stream;
+    use crate::util::parser_testing::{string_to_expr, string_to_item};
+    use crate::with_globals;
     use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
-    use ast::{self, Ident, PatKind};
-    use attr::first_attr_value_str_by_name;
-    use ptr::P;
-    use print::pprust::item_to_string;
-    use tokenstream::{DelimSpan, TokenTree};
-    use util::parser_testing::string_to_stream;
-    use util::parser_testing::{string_to_expr, string_to_item};
-    use with_globals;
 
     /// Parses an item.
     ///
     /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
     /// when a syntax error occurred.
     fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                        -> PResult<Option<P<ast::Item>>> {
+                                        -> PResult<'_, Option<P<ast::Item>>> {
         new_parser_from_source_str(sess, name, source).parse_item()
     }
 
@@ -913,20 +915,20 @@ mod tests {
         struct PatIdentVisitor {
             spans: Vec<Span>
         }
-        impl<'a> ::visit::Visitor<'a> for PatIdentVisitor {
+        impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor {
             fn visit_pat(&mut self, p: &'a ast::Pat) {
                 match p.node {
                     PatKind::Ident(_ , ref spannedident, _) => {
                         self.spans.push(spannedident.span.clone());
                     }
                     _ => {
-                        ::visit::walk_pat(self, p);
+                        crate::visit::walk_pat(self, p);
                     }
                 }
             }
         }
         let mut v = PatIdentVisitor { spans: Vec::new() };
-        ::visit::walk_item(&mut v, &item);
+        crate::visit::walk_item(&mut v, &item);
         return v.spans;
     }
 
@@ -1007,7 +1009,7 @@ mod tests {
     fn ttdelim_span() {
         fn parse_expr_from_source_str(
             name: FileName, source: String, sess: &ParseSess
-        ) -> PResult<P<ast::Expr>> {
+        ) -> PResult<'_, P<ast::Expr>> {
             new_parser_from_source_str(sess, name, source).parse_expr()
         }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index bbd1770e9c6..cacdab980fa 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -1,53 +1,55 @@
+use crate::ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
+use crate::ast::{GenericBound, TraitBoundModifier};
+use crate::ast::Unsafety;
+use crate::ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
+use crate::ast::Block;
+use crate::ast::{BlockCheckMode, CaptureBy, Movability};
+use crate::ast::{Constness, Crate};
+use crate::ast::Defaultness;
+use crate::ast::EnumDef;
+use crate::ast::{Expr, ExprKind, RangeLimits};
+use crate::ast::{Field, FnDecl, FnHeader};
+use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
+use crate::ast::{GenericParam, GenericParamKind};
+use crate::ast::GenericArg;
+use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
+use crate::ast::{Label, Lifetime, Lit, LitKind};
+use crate::ast::Local;
+use crate::ast::MacStmtStyle;
+use crate::ast::{Mac, Mac_, MacDelimiter};
+use crate::ast::{MutTy, Mutability};
+use crate::ast::{Pat, PatKind, PathSegment};
+use crate::ast::{PolyTraitRef, QSelf};
+use crate::ast::{Stmt, StmtKind};
+use crate::ast::{VariantData, StructField};
+use crate::ast::StrStyle;
+use crate::ast::SelfKind;
+use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax};
+use crate::ast::{Ty, TyKind, TypeBinding, GenericBounds};
+use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
+use crate::ast::{UseTree, UseTreeKind};
+use crate::ast::{BinOpKind, UnOp};
+use crate::ast::{RangeEnd, RangeSyntax};
+use crate::{ast, attr};
+use crate::ext::base::DummyResult;
+use crate::source_map::{self, SourceMap, Spanned, respan};
+use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
+use crate::parse::{self, SeqSep, classify, token};
+use crate::parse::lexer::TokenAndSpan;
+use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use crate::parse::token::DelimToken;
+use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
+use crate::util::parser::{AssocOp, Fixity};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::parse::PResult;
+use crate::ThinVec;
+use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
+use crate::symbol::{Symbol, keywords};
+
 use rustc_target::spec::abi::{self, Abi};
-use ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
-use ast::{GenericBound, TraitBoundModifier};
-use ast::Unsafety;
-use ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
-use ast::Block;
-use ast::{BlockCheckMode, CaptureBy, Movability};
-use ast::{Constness, Crate};
-use ast::Defaultness;
-use ast::EnumDef;
-use ast::{Expr, ExprKind, RangeLimits};
-use ast::{Field, FnDecl, FnHeader};
-use ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
-use ast::{GenericParam, GenericParamKind};
-use ast::GenericArg;
-use ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
-use ast::{Label, Lifetime, Lit, LitKind};
-use ast::Local;
-use ast::MacStmtStyle;
-use ast::{Mac, Mac_, MacDelimiter};
-use ast::{MutTy, Mutability};
-use ast::{Pat, PatKind, PathSegment};
-use ast::{PolyTraitRef, QSelf};
-use ast::{Stmt, StmtKind};
-use ast::{VariantData, StructField};
-use ast::StrStyle;
-use ast::SelfKind;
-use ast::{TraitItem, TraitRef, TraitObjectSyntax};
-use ast::{Ty, TyKind, TypeBinding, GenericBounds};
-use ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
-use ast::{UseTree, UseTreeKind};
-use ast::{BinOpKind, UnOp};
-use ast::{RangeEnd, RangeSyntax};
-use {ast, attr};
-use ext::base::DummyResult;
-use source_map::{self, SourceMap, Spanned, respan};
 use syntax_pos::{self, Span, MultiSpan, BytePos, FileName};
-use errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
-use parse::{self, SeqSep, classify, token};
-use parse::lexer::TokenAndSpan;
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use parse::token::DelimToken;
-use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
-use util::parser::{AssocOp, Fixity};
-use print::pprust;
-use ptr::P;
-use parse::PResult;
-use ThinVec;
-use tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
-use symbol::{Symbol, keywords};
+use log::{debug, trace};
 
 use std::borrow::Cow;
 use std::cmp;
@@ -64,7 +66,7 @@ pub enum AliasKind {
     Existential(GenericBounds),
 }
 
-bitflags! {
+bitflags::bitflags! {
     struct Restrictions: u8 {
         const STMT_EXPR         = 1 << 0;
         const NO_STRUCT_LITERAL = 1 << 1;
@@ -453,7 +455,7 @@ pub enum Error {
 impl Error {
     fn span_err<S: Into<MultiSpan>>(self,
                                         sp: S,
-                                        handler: &errors::Handler) -> DiagnosticBuilder {
+                                        handler: &errors::Handler) -> DiagnosticBuilder<'_> {
         match self {
             Error::FileNotFoundForModule { ref mod_name,
                                            ref default_path,
@@ -1313,7 +1315,7 @@ impl<'a> Parser<'a> {
         self.sess.span_diagnostic.span_bug(sp, m)
     }
 
-    fn cancel(&self, err: &mut DiagnosticBuilder) {
+    fn cancel(&self, err: &mut DiagnosticBuilder<'_>) {
         self.sess.span_diagnostic.cancel(err)
     }
 
@@ -1721,7 +1723,7 @@ impl<'a> Parser<'a> {
         match ty.node {
             TyKind::Rptr(ref lifetime, ref mut_ty) => {
                 let sum_with_parens = pprust::to_string(|s| {
-                    use print::pprust::PrintState;
+                    use crate::print::pprust::PrintState;
 
                     s.s.word("&")?;
                     s.print_opt_lifetime(lifetime)?;
@@ -3063,7 +3065,7 @@ impl<'a> Parser<'a> {
                             None => continue,
                         };
                         let sugg = pprust::to_string(|s| {
-                            use print::pprust::PrintState;
+                            use crate::print::pprust::PrintState;
                             s.popen()?;
                             s.print_expr(&e)?;
                             s.s.word( ".")?;
@@ -5220,7 +5222,7 @@ impl<'a> Parser<'a> {
                         stmt_span = stmt_span.with_hi(self.prev_span.hi());
                     }
                     let sugg = pprust::to_string(|s| {
-                        use print::pprust::{PrintState, INDENT_UNIT};
+                        use crate::print::pprust::{PrintState, INDENT_UNIT};
                         s.ibox(INDENT_UNIT)?;
                         s.bopen()?;
                         s.print_stmt(&stmt)?;
@@ -7050,7 +7052,7 @@ impl<'a> Parser<'a> {
     /// Parse a `mod <foo> { ... }` or `mod <foo>;` item
     fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
         let (in_cfg, outer_attrs) = {
-            let mut strip_unconfigured = ::config::StripUnconfigured {
+            let mut strip_unconfigured = crate::config::StripUnconfigured {
                 sess: self.sess,
                 features: None, // don't perform gated feature checking
             };
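
The `bitflags::bitflags!` change is the 2018 treatment of `macro_rules!` macros from other crates: the old `#[macro_use] extern crate bitflags;` is dropped and the macro is invoked through its path. A sketch of the same call, assuming a `bitflags` 1.x dependency; the flag values mirror the ones in the hunk:

bitflags::bitflags! {
    struct Restrictions: u8 {
        const STMT_EXPR         = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
    }
}

fn main() {
    let r = Restrictions::STMT_EXPR | Restrictions::NO_STRUCT_LITERAL;
    assert!(r.contains(Restrictions::NO_STRUCT_LITERAL));
}
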
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 5181bb8f34e..3b1fa5ea01f 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -1,22 +1,26 @@
-pub use self::BinOpToken::*;
-pub use self::Nonterminal::*;
-pub use self::DelimToken::*;
-pub use self::Lit::*;
-pub use self::Token::*;
-
-use ast::{self};
-use parse::ParseSess;
-use print::pprust;
-use ptr::P;
+pub use BinOpToken::*;
+pub use Nonterminal::*;
+pub use DelimToken::*;
+pub use Lit::*;
+pub use Token::*;
+
+use crate::ast::{self};
+use crate::parse::ParseSess;
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::symbol::keywords;
+use crate::syntax::parse::parse_stream_from_source_str;
+use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
+
 use serialize::{Decodable, Decoder, Encodable, Encoder};
-use symbol::keywords;
-use syntax::parse::parse_stream_from_source_str;
-use syntax_pos::{self, Span, FileName};
 use syntax_pos::symbol::{self, Symbol};
-use tokenstream::{self, DelimSpan, TokenStream, TokenTree};
+use syntax_pos::{self, Span, FileName};
+use log::info;
 
 use std::{cmp, fmt};
 use std::mem;
+#[cfg(target_arch = "x86_64")]
+use rustc_data_structures::static_assert;
 use rustc_data_structures::sync::{Lrc, Lock};
 
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@@ -683,7 +687,7 @@ impl PartialEq for Nonterminal {
 }
 
 impl fmt::Debug for Nonterminal {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             NtItem(..) => f.pad("NtItem(..)"),
             NtBlock(..) => f.pad("NtBlock(..)"),
@@ -729,7 +733,7 @@ impl PartialEq for LazyTokenStream {
 }
 
 impl fmt::Debug for LazyTokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Debug::fmt(&self.clone().0.into_inner(), f)
     }
 }
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 5232b8333e6..2d837cb565b 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -138,6 +138,7 @@ use std::collections::VecDeque;
 use std::fmt;
 use std::io;
 use std::borrow::Cow;
+use log::debug;
 
 /// How to break. Described in more detail in the module docs.
 #[derive(Clone, Copy, PartialEq)]
@@ -192,7 +193,7 @@ impl Token {
 }
 
 impl fmt::Display for Token {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             Token::String(ref s, len) => write!(f, "STR({},{})", s, len),
             Token::Break(_) => f.write_str("BREAK"),
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 7cecf4b9af7..c7c4c4f1620 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -1,21 +1,22 @@
+use crate::ast::{self, BlockCheckMode, PatKind, RangeEnd, RangeSyntax};
+use crate::ast::{SelfKind, GenericBound, TraitBoundModifier};
+use crate::ast::{Attribute, MacDelimiter, GenericArg};
+use crate::util::parser::{self, AssocOp, Fixity};
+use crate::attr;
+use crate::source_map::{self, SourceMap, Spanned};
+use crate::parse::token::{self, BinOpToken, Token};
+use crate::parse::lexer::comments;
+use crate::parse::{self, ParseSess};
+use crate::print::pp::{self, Breaks};
+use crate::print::pp::Breaks::{Consistent, Inconsistent};
+use crate::ptr::P;
+use crate::std_inject;
+use crate::symbol::keywords;
+use crate::tokenstream::{self, TokenStream, TokenTree};
+
 use rustc_target::spec::abi::{self, Abi};
-use ast::{self, BlockCheckMode, PatKind, RangeEnd, RangeSyntax};
-use ast::{SelfKind, GenericBound, TraitBoundModifier};
-use ast::{Attribute, MacDelimiter, GenericArg};
-use util::parser::{self, AssocOp, Fixity};
-use attr;
-use source_map::{self, SourceMap, Spanned};
 use syntax_pos::{self, BytePos};
-use parse::token::{self, BinOpToken, Token};
-use parse::lexer::comments;
-use parse::{self, ParseSess};
-use print::pp::{self, Breaks};
-use print::pp::Breaks::{Consistent, Inconsistent};
-use ptr::P;
-use std_inject;
-use symbol::keywords;
 use syntax_pos::{DUMMY_SP, FileName};
-use tokenstream::{self, TokenStream, TokenTree};
 
 use std::ascii;
 use std::borrow::Cow;
@@ -34,8 +35,8 @@ pub enum AnnNode<'a> {
 }
 
 pub trait PpAnn {
-    fn pre(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) }
-    fn post(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) }
+    fn pre(&self, _state: &mut State<'_>, _node: AnnNode<'_>) -> io::Result<()> { Ok(()) }
+    fn post(&self, _state: &mut State<'_>, _node: AnnNode<'_>) -> io::Result<()> { Ok(()) }
 }
 
 #[derive(Copy, Clone)]
@@ -150,7 +151,7 @@ impl<'a> State<'a> {
 }
 
 pub fn to_string<F>(f: F) -> String where
-    F: FnOnce(&mut State) -> io::Result<()>,
+    F: FnOnce(&mut State<'_>) -> io::Result<()>,
 {
     let mut wr = Vec::new();
     {
@@ -969,7 +970,7 @@ impl<'a> State<'a> {
                                   elts: &[T],
                                   mut op: F,
                                   mut get_span: G) -> io::Result<()> where
-        F: FnMut(&mut State, &T) -> io::Result<()>,
+        F: FnMut(&mut State<'_>, &T) -> io::Result<()>,
         G: FnMut(&T) -> syntax_pos::Span,
     {
         self.rbox(0, b)?;
@@ -3210,10 +3211,10 @@ impl<'a> State<'a> {
 mod tests {
     use super::*;
 
-    use ast;
-    use source_map;
+    use crate::ast;
+    use crate::source_map;
+    use crate::with_globals;
     use syntax_pos;
-    use with_globals;
 
     #[test]
     fn test_fun_to_string() {
diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs
index 3effe53cd29..0ec83447d52 100644
--- a/src/libsyntax/ptr.rs
+++ b/src/libsyntax/ptr.rs
@@ -129,19 +129,19 @@ impl<T: 'static + Clone> Clone for P<T> {
 }
 
 impl<T: ?Sized + Debug> Debug for P<T> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         Debug::fmt(&self.ptr, f)
     }
 }
 
 impl<T: Display> Display for P<T> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         Display::fmt(&**self, f)
     }
 }
 
 impl<T> fmt::Pointer for P<T> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Pointer::fmt(&self.ptr, f)
     }
 }
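
Standard-library types get the same treatment: `fmt::Formatter` carries a lifetime, so the idiom lint asks for `Formatter<'_>` in every `Debug`/`Display` impl. A sketch with an invented wrapper type in place of `P<T>`:

use std::fmt;

struct Wrapper<T>(T);

impl<T: fmt::Display> fmt::Display for Wrapper<T> {
    // `Formatter<'_>` rather than bare `Formatter`, as in the impls above.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Wrapper({})", self.0)
    }
}

fn main() {
    println!("{}", Wrapper(42));
}
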
diff --git a/src/libsyntax/show_span.rs b/src/libsyntax/show_span.rs
index 4228e0c87ce..2c32771266e 100644
--- a/src/libsyntax/show_span.rs
+++ b/src/libsyntax/show_span.rs
@@ -5,10 +5,10 @@
 
 use std::str::FromStr;
 
-use ast;
-use errors;
-use visit;
-use visit::Visitor;
+use crate::ast;
+use crate::errors;
+use crate::visit;
+use crate::visit::Visitor;
 
 enum Mode {
     Expression,
diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs
index 0a46d034558..552a3d30261 100644
--- a/src/libsyntax/source_map.rs
+++ b/src/libsyntax/source_map.rs
@@ -10,7 +10,7 @@
 
 pub use syntax_pos::*;
 pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo};
-pub use self::ExpnFormat::*;
+pub use ExpnFormat::*;
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::stable_hasher::StableHasher;
@@ -22,7 +22,9 @@ use std::path::{Path, PathBuf};
 use std::env;
 use std::fs;
 use std::io;
-use errors::SourceMapper;
+use log::debug;
+
+use crate::errors::SourceMapper;
 
 /// Return the span itself if it doesn't come from a macro expansion,
 /// otherwise return the call site span up to the `enclosing_sp` by
@@ -167,7 +169,7 @@ impl SourceMap {
         Ok(self.new_source_file(filename, src))
     }
 
-    pub fn files(&self) -> MappedLockGuard<Vec<Lrc<SourceFile>>> {
+    pub fn files(&self) -> MappedLockGuard<'_, Vec<Lrc<SourceFile>>> {
         LockGuard::map(self.files.borrow(), |files| &mut files.source_files)
     }
 
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index e0770834421..5b904fa86ad 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -1,14 +1,15 @@
-use ast;
-use attr;
+use crate::ast;
+use crate::attr;
+use crate::edition::Edition;
+use crate::ext::hygiene::{Mark, SyntaxContext};
+use crate::symbol::{Symbol, keywords};
+use crate::source_map::{ExpnInfo, MacroAttribute, dummy_spanned, hygiene, respan};
+use crate::ptr::P;
+use crate::tokenstream::TokenStream;
+
 use std::cell::Cell;
 use std::iter;
-use edition::Edition;
-use ext::hygiene::{Mark, SyntaxContext};
-use symbol::{Symbol, keywords};
 use syntax_pos::{DUMMY_SP, Span};
-use source_map::{ExpnInfo, MacroAttribute, dummy_spanned, hygiene, respan};
-use ptr::P;
-use tokenstream::TokenStream;
 
 /// Craft a span that will be ignored by the stability lint's
 /// call to source_map's `is_internal` check.
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 12f82a01dcf..703c4f2db34 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -3,33 +3,35 @@
 #![allow(dead_code)]
 #![allow(unused_imports)]
 
-use self::HasTestSignature::*;
+use HasTestSignature::*;
 
 use std::iter;
 use std::slice;
 use std::mem;
 use std::vec;
-use attr::{self, HasAttrs};
+
+use log::debug;
+use smallvec::{smallvec, SmallVec};
 use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos};
 
-use source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned, respan};
-use errors;
-use config;
-use entry::{self, EntryPointType};
-use ext::base::{ExtCtxt, Resolver};
-use ext::build::AstBuilder;
-use ext::expand::ExpansionConfig;
-use ext::hygiene::{self, Mark, SyntaxContext};
-use mut_visit::{*, ExpectOne};
-use feature_gate::Features;
-use util::map_in_place::MapInPlace;
-use parse::{token, ParseSess};
-use print::pprust;
-use ast::{self, Ident};
-use ptr::P;
-use smallvec::SmallVec;
-use symbol::{self, Symbol, keywords};
-use ThinVec;
+use crate::attr::{self, HasAttrs};
+use crate::source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned, respan};
+use crate::errors;
+use crate::config;
+use crate::entry::{self, EntryPointType};
+use crate::ext::base::{ExtCtxt, Resolver};
+use crate::ext::build::AstBuilder;
+use crate::ext::expand::ExpansionConfig;
+use crate::ext::hygiene::{self, Mark, SyntaxContext};
+use crate::mut_visit::{*, ExpectOne};
+use crate::feature_gate::Features;
+use crate::util::map_in_place::MapInPlace;
+use crate::parse::{token, ParseSess};
+use crate::print::pprust;
+use crate::ast::{self, Ident};
+use crate::ptr::P;
+use crate::symbol::{self, Symbol, keywords};
+use crate::ThinVec;
 
 struct Test {
     span: Span,
@@ -210,7 +212,7 @@ impl MutVisitor for EntryPointCleaner {
 /// Each tested submodule will contain a similar reexport module that we will export
 /// under the name of the original module. That is, `submod::__test_reexports` is
 /// reexported like so `pub use submod::__test_reexports as submod`.
-fn mk_reexport_mod(cx: &mut TestCtxt,
+fn mk_reexport_mod(cx: &mut TestCtxt<'_>,
                    parent: ast::NodeId,
                    tests: Vec<Ident>,
                    tested_submods: Vec<(Ident, Ident)>)
@@ -299,7 +301,7 @@ fn generate_test_harness(sess: &ParseSess,
 /// Craft a span that will be ignored by the stability lint's
 /// call to source_map's `is_internal` check.
 /// The expanded code calls some unstable functions in the test crate.
-fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
+fn ignored_span(cx: &TestCtxt<'_>, sp: Span) -> Span {
     sp.with_ctxt(cx.ctxt)
 }
 
@@ -318,7 +320,7 @@ enum BadTestSignature {
 
 /// Creates a function item for use as the main function of a test build.
 /// This function will call the `test_runner` as specified by the crate attribute
-fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
+fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
     // Writing this out by hand with 'ignored_span':
     //        pub fn main() {
     //            #![main]
@@ -398,7 +400,7 @@ fn path_name_i(idents: &[Ident]) -> String {
 
 /// Creates a slice containing every test like so:
 /// &[path::to::test1, path::to::test2]
-fn mk_tests_slice(cx: &TestCtxt) -> P<ast::Expr> {
+fn mk_tests_slice(cx: &TestCtxt<'_>) -> P<ast::Expr> {
     debug!("building test vector from {} tests", cx.test_cases.len());
     let ref ecx = cx.ext_cx;
 
@@ -410,7 +412,7 @@ fn mk_tests_slice(cx: &TestCtxt) -> P<ast::Expr> {
 }
 
 /// Creates a path from the top-level __test module to the test via __test_reexports
-fn visible_path(cx: &TestCtxt, path: &[Ident]) -> Vec<Ident>{
+fn visible_path(cx: &TestCtxt<'_>, path: &[Ident]) -> Vec<Ident>{
     let mut visible_path = vec![];
     match cx.toplevel_reexport {
         Some(id) => visible_path.push(id),
diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs
index 26b4762aaf3..add4d2bead1 100644
--- a/src/libsyntax/test_snippet.rs
+++ b/src/libsyntax/test_snippet.rs
@@ -1,6 +1,8 @@
-use source_map::{SourceMap, FilePathMapping};
-use errors::Handler;
-use errors::emitter::EmitterWriter;
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::Handler;
+use crate::errors::emitter::EmitterWriter;
+use crate::with_globals;
+
 use std::io;
 use std::io::prelude::*;
 use rustc_data_structures::sync::Lrc;
@@ -8,7 +10,6 @@ use std::str;
 use std::sync::{Arc, Mutex};
 use std::path::Path;
 use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan};
-use with_globals;
 
 /// Identify a position in the text by the Nth occurrence of a string.
 struct Position {
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 2b950b46232..b6e4d4cd976 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -12,12 +12,15 @@
 //! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
 //! ownership of the original.
 
+use crate::ext::base;
+use crate::ext::tt::{macro_parser, quoted};
+use crate::parse::Directory;
+use crate::parse::token::{self, DelimToken, Token};
+use crate::print::pprust;
+
 use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
-use ext::base;
-use ext::tt::{macro_parser, quoted};
-use parse::Directory;
-use parse::token::{self, DelimToken, Token};
-use print::pprust;
+#[cfg(target_arch = "x86_64")]
+use rustc_data_structures::static_assert;
 use rustc_data_structures::sync::Lrc;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
 
@@ -46,7 +49,7 @@ pub enum TokenTree {
 
 impl TokenTree {
     /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: TokenStream)
+    pub fn parse(cx: &base::ExtCtxt<'_>, mtch: &[quoted::TokenTree], tts: TokenStream)
                  -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let directory = Directory {
@@ -161,7 +164,7 @@ pub enum IsJoint {
     NonJoint
 }
 
-use self::IsJoint::*;
+use IsJoint::*;
 
 impl TokenStream {
     /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
@@ -492,7 +495,7 @@ impl Cursor {
 }
 
 impl fmt::Display for TokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.write_str(&pprust::tokens_to_string(self.clone()))
     }
 }
@@ -546,11 +549,11 @@ impl DelimSpan {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use syntax::ast::Ident;
-    use with_globals;
+    use crate::syntax::ast::Ident;
+    use crate::with_globals;
+    use crate::parse::token::Token;
+    use crate::util::parser_testing::string_to_stream;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
-    use parse::token::Token;
-    use util::parser_testing::string_to_stream;
 
     fn string_to_ts(string: &str) -> TokenStream {
         string_to_stream(string.to_owned())
diff --git a/src/libsyntax/util/lev_distance.rs b/src/libsyntax/util/lev_distance.rs
index d6d2251b5bc..c989fc7a5b8 100644
--- a/src/libsyntax/util/lev_distance.rs
+++ b/src/libsyntax/util/lev_distance.rs
@@ -1,5 +1,5 @@
 use std::cmp;
-use symbol::Symbol;
+use crate::symbol::Symbol;
 
 /// Find the Levenshtein distance between two strings
 pub fn lev_distance(a: &str, b: &str) -> usize {
@@ -101,7 +101,7 @@ fn test_lev_distance() {
 
 #[test]
 fn test_find_best_match_for_name() {
-    use with_globals;
+    use crate::with_globals;
     with_globals(|| {
         let input = vec![Symbol::intern("aaab"), Symbol::intern("aaabc")];
         assert_eq!(
diff --git a/src/libsyntax/util/node_count.rs b/src/libsyntax/util/node_count.rs
index 7dd213ae38b..521edac8f5f 100644
--- a/src/libsyntax/util/node_count.rs
+++ b/src/libsyntax/util/node_count.rs
@@ -1,7 +1,7 @@
 // Simply gives a rought count of the number of nodes in an AST.
 
-use visit::*;
-use ast::*;
+use crate::visit::*;
+use crate::ast::*;
 use syntax_pos::Span;
 
 pub struct NodeCounter {
@@ -69,7 +69,7 @@ impl<'ast> Visitor<'ast> for NodeCounter {
         self.count += 1;
         walk_generics(self, g)
     }
-    fn visit_fn(&mut self, fk: FnKind, fd: &FnDecl, s: Span, _: NodeId) {
+    fn visit_fn(&mut self, fk: FnKind<'_>, fd: &FnDecl, s: Span, _: NodeId) {
         self.count += 1;
         walk_fn(self, fk, fd, s)
     }
diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs
index 89d4e53b8d1..61729a08060 100644
--- a/src/libsyntax/util/parser.rs
+++ b/src/libsyntax/util/parser.rs
@@ -1,6 +1,6 @@
-use parse::token::{Token, BinOpToken};
-use symbol::keywords;
-use ast::{self, BinOpKind};
+use crate::parse::token::{Token, BinOpToken};
+use crate::symbol::keywords;
+use crate::ast::{self, BinOpKind};
 
 /// Associative operator with precedence.
 ///
@@ -72,7 +72,7 @@ pub enum Fixity {
 impl AssocOp {
     /// Create a new AssocOP from a token
     pub fn from_token(t: &Token) -> Option<AssocOp> {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *t {
             Token::BinOpEq(k) => Some(AssignOp(k)),
             Token::LArrow => Some(ObsoleteInPlace),
@@ -107,7 +107,7 @@ impl AssocOp {
 
     /// Create a new AssocOp from ast::BinOpKind.
     pub fn from_ast_binop(op: BinOpKind) -> Self {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match op {
             BinOpKind::Lt => Less,
             BinOpKind::Gt => Greater,
@@ -132,7 +132,7 @@ impl AssocOp {
 
     /// Gets the precedence of this operator
     pub fn precedence(&self) -> usize {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *self {
             As | Colon => 14,
             Multiply | Divide | Modulus => 13,
@@ -152,7 +152,7 @@ impl AssocOp {
 
     /// Gets the fixity of this operator
     pub fn fixity(&self) -> Fixity {
-        use self::AssocOp::*;
+        use AssocOp::*;
         // NOTE: it is a bug to have an operators that has same precedence but different fixities!
         match *self {
             ObsoleteInPlace | Assign | AssignOp(_) => Fixity::Right,
@@ -164,7 +164,7 @@ impl AssocOp {
     }
 
     pub fn is_comparison(&self) -> bool {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *self {
             Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual => true,
             ObsoleteInPlace | Assign | AssignOp(_) | As | Multiply | Divide | Modulus | Add |
@@ -174,7 +174,7 @@ impl AssocOp {
     }
 
     pub fn is_assign_like(&self) -> bool {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *self {
             Assign | AssignOp(_) | ObsoleteInPlace => true,
             Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual | As | Multiply | Divide |
@@ -184,7 +184,7 @@ impl AssocOp {
     }
 
     pub fn to_ast_binop(&self) -> Option<BinOpKind> {
-        use self::AssocOp::*;
+        use AssocOp::*;
         match *self {
             Less => Some(BinOpKind::Lt),
             Greater => Some(BinOpKind::Gt),
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index d0b3cd865ce..dbe2b8d39f2 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -1,10 +1,11 @@
-use ast::{self, Ident};
-use source_map::FilePathMapping;
-use parse::{ParseSess, PResult, source_file_to_stream};
-use parse::{lexer, new_parser_from_source_str};
-use parse::parser::Parser;
-use ptr::P;
-use tokenstream::TokenStream;
+use crate::ast::{self, Ident};
+use crate::source_map::FilePathMapping;
+use crate::parse::{ParseSess, PResult, source_file_to_stream};
+use crate::parse::{lexer, new_parser_from_source_str};
+use crate::parse::parser::Parser;
+use crate::ptr::P;
+use crate::tokenstream::TokenStream;
+
 use std::iter::Peekable;
 use std::path::PathBuf;
 
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 8cbd47ca70f..acbb58a66b6 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -13,10 +13,11 @@
 //! instance, a walker looking for item names in a module will miss all of
 //! those that are created by the expansion of a macro.
 
-use ast::*;
+use crate::ast::*;
+use crate::parse::token::Token;
+use crate::tokenstream::{TokenTree, TokenStream};
+
 use syntax_pos::Span;
-use parse::token::Token;
-use tokenstream::{TokenTree, TokenStream};
 
 #[derive(Copy, Clone)]
 pub enum FnKind<'a> {