author     Taiki Endo <te316e89@gmail.com>  2019-02-07 02:33:01 +0900
committer  Taiki Endo <te316e89@gmail.com>  2019-02-07 02:33:01 +0900
commit     7bb082d27fe472f52b103de0ae9fc6fa7e6546cc (patch)
tree       dfed08e00fc6e88022fd7249bd5017e5d57110a7 /src/libsyntax/parse
parent     2596bc1368d1e3d34c9a7841ad87a3100f01cbad (diff)
download   rust-7bb082d27fe472f52b103de0ae9fc6fa7e6546cc.tar.gz
           rust-7bb082d27fe472f52b103de0ae9fc6fa7e6546cc.zip
libsyntax => 2018
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/attr.rs                 |  18
-rw-r--r--  src/libsyntax/parse/classify.rs             |   2
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs       |  29
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs            |  35
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs     |   8
-rw-r--r--  src/libsyntax/parse/lexer/unicode_chars.rs  |   2
-rw-r--r--  src/libsyntax/parse/mod.rs                  |  62
-rw-r--r--  src/libsyntax/parse/parser.rs               | 112
-rw-r--r--  src/libsyntax/parse/token.rs                |  36
9 files changed, 161 insertions, 143 deletions
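
The patch is the mechanical part of moving libsyntax to the 2018 edition: paths into this crate, which used to resolve implicitly from the crate root, now carry an explicit `crate::` prefix, while paths into other crates (syntax_pos, rustc_data_structures, log) stay bare, and macros such as debug! are pulled in with a plain `use` instead of a crate-wide `#[macro_use]`. A minimal before/after sketch, not libsyntax itself; the module and type names below are simplified stand-ins, and the log crate is assumed as a dependency:

// 2015 edition: a bare path in `use` resolves from the crate root,
// e.g. `use parse::token;`.
// 2018 edition: crate-internal paths are spelled out with `crate::`.
use crate::parse::token::Token;

// External dependencies keep their bare name on both editions.
use log::debug;

pub mod parse {
    pub mod token {
        #[derive(Debug)]
        pub struct Token;
    }
}

fn demo() {
    let t = Token;
    debug!("got {:?}", t);
}
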
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 914a0667ebf..b36ca0574cb 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -1,10 +1,12 @@
-use attr;
-use ast;
-use source_map::respan;
-use parse::{SeqSep, PResult};
-use parse::token::{self, Nonterminal, DelimToken};
-use parse::parser::{Parser, TokenType, PathStyle};
-use tokenstream::{TokenStream, TokenTree};
+use crate::attr;
+use crate::ast;
+use crate::source_map::respan;
+use crate::parse::{SeqSep, PResult};
+use crate::parse::token::{self, Nonterminal, DelimToken};
+use crate::parse::parser::{Parser, TokenType, PathStyle};
+use crate::tokenstream::{TokenStream, TokenTree};
+
+use log::debug;
 
 #[derive(Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -74,7 +76,7 @@ impl<'a> Parser<'a> {
     /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy`
     /// that prescribes how to handle inner attributes.
     fn parse_attribute_with_inner_parse_policy(&mut self,
-                                               inner_parse_policy: InnerAttributeParsePolicy)
+                                               inner_parse_policy: InnerAttributeParsePolicy<'_>)
                                                -> PResult<'a, ast::Attribute> {
         debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
                inner_parse_policy,
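
The signature change in the hunk above is the other edit that recurs through this patch: types that carry a lifetime parameter (InnerAttributeParsePolicy<'a>, StringReader<'a>, fmt::Formatter<'a>, ...) now spell the elided lifetime as '_ where it used to be invisible, which is what the 2018 elided_lifetimes_in_paths idiom lint asks for. A small stand-alone sketch, not libsyntax code:

struct Policy<'a> {
    name: &'a str,
}

// 2015 style: `fn log_policy(p: &Policy)` hides that Policy borrows something.
// 2018 idiom: the borrow is made visible with the anonymous lifetime `'_`.
fn log_policy(p: &Policy<'_>) -> usize {
    p.name.len()
}

fn main() {
    let p = Policy { name: "inner attribute" };
    assert_eq!(log_policy(&p), 15);
}
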
diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs
index a1cdfd9146a..b4103440e35 100644
--- a/src/libsyntax/parse/classify.rs
+++ b/src/libsyntax/parse/classify.rs
@@ -2,7 +2,7 @@
 
 // Predicates on exprs and stmts that the pretty-printer and parser use
 
-use ast;
+use crate::ast;
 
 /// Does this expression require a semicolon to be treated
 /// as a statement? The negation of this: 'can this expression
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index ffc480d829d..4632d814d5c 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -1,11 +1,13 @@
-pub use self::CommentStyle::*;
+pub use CommentStyle::*;
+
+use crate::ast;
+use crate::source_map::SourceMap;
+use crate::parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
+use crate::parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
+use crate::print::pprust;
 
-use ast;
-use source_map::SourceMap;
 use syntax_pos::{BytePos, CharPos, Pos, FileName};
-use parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
-use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
-use print::pprust;
+use log::debug;
 
 use std::io::Read;
 use std::usize;
@@ -135,7 +137,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
     panic!("not a doc-comment: {}", comment);
 }
 
-fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
+fn push_blank_line_comment(rdr: &StringReader<'_>, comments: &mut Vec<Comment>) {
     debug!(">>> blank-line comment");
     comments.push(Comment {
         style: BlankLine,
@@ -144,7 +146,10 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
     });
 }
 
-fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mut Vec<Comment>) {
+fn consume_whitespace_counting_blank_lines(
+    rdr: &mut StringReader<'_>,
+    comments: &mut Vec<Comment>
+) {
     while is_pattern_whitespace(rdr.ch) && !rdr.is_eof() {
         if rdr.ch_is('\n') {
             push_blank_line_comment(rdr, &mut *comments);
@@ -153,7 +158,7 @@ fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mu
     }
 }
 
-fn read_shebang_comment(rdr: &mut StringReader,
+fn read_shebang_comment(rdr: &mut StringReader<'_>,
                         code_to_the_left: bool,
                         comments: &mut Vec<Comment>) {
     debug!(">>> shebang comment");
@@ -166,7 +171,7 @@ fn read_shebang_comment(rdr: &mut StringReader,
     });
 }
 
-fn read_line_comments(rdr: &mut StringReader,
+fn read_line_comments(rdr: &mut StringReader<'_>,
                       code_to_the_left: bool,
                       comments: &mut Vec<Comment>) {
     debug!(">>> line comments");
@@ -222,7 +227,7 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String>, s: String, col:
     lines.push(s1);
 }
 
-fn read_block_comment(rdr: &mut StringReader,
+fn read_block_comment(rdr: &mut StringReader<'_>,
                       code_to_the_left: bool,
                       comments: &mut Vec<Comment>) {
     debug!(">>> block comment");
@@ -312,7 +317,7 @@ fn read_block_comment(rdr: &mut StringReader,
 }
 
 
-fn consume_comment(rdr: &mut StringReader,
+fn consume_comment(rdr: &mut StringReader<'_>,
                    comments: &mut Vec<Comment>,
                    code_to_the_left: &mut bool,
                    anything_to_the_left: &mut bool) {
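
Besides the path rewrites, each file that calls debug! gains an explicit `use log::debug;`: on the 2018 edition a macro_rules! macro exported by another crate can be imported like any other item, so the crate no longer relies on `#[macro_use] extern crate log;` at its root. A minimal sketch assuming the log and env_logger crates as dependencies; the function here is illustrative, not the real comment reader:

// 2015: the macro came in through `#[macro_use] extern crate log;` at the root.
// 2018: a plain item import is enough, scoped to the file that needs it.
use log::{debug, info};

fn read_comments(src: &str) {
    debug!(">>> scanning {} bytes for comments", src.len());
    if src.starts_with("//") {
        info!("line comment found");
    }
}

fn main() {
    env_logger::init(); // any logger implementation works
    read_comments("// a doc comment");
}
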
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 06f9162a400..2e3233c8ed8 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1,9 +1,10 @@
-use ast::{self, Ident};
+use crate::ast::{self, Ident};
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
+use crate::parse::{token, ParseSess};
+use crate::symbol::{Symbol, keywords};
+
 use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
-use source_map::{SourceMap, FilePathMapping};
-use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
-use parse::{token, ParseSess};
-use symbol::{Symbol, keywords};
 use core::unicode::property::Pattern_White_Space;
 
 use std::borrow::Cow;
@@ -11,6 +12,7 @@ use std::char;
 use std::iter;
 use std::mem::replace;
 use rustc_data_structures::sync::Lrc;
+use log::debug;
 
 pub mod comments;
 mod tokentrees;
@@ -449,7 +451,7 @@ impl<'a> StringReader<'a> {
         }
         return s.into();
 
-        fn translate_crlf_(rdr: &StringReader,
+        fn translate_crlf_(rdr: &StringReader<'_>,
                            start: BytePos,
                            s: &str,
                            mut j: usize,
@@ -1866,19 +1868,20 @@ fn char_at(s: &str, byte: usize) -> char {
 mod tests {
     use super::*;
 
-    use ast::{Ident, CrateConfig};
-    use symbol::Symbol;
-    use syntax_pos::{BytePos, Span, NO_EXPANSION};
-    use source_map::SourceMap;
-    use errors;
-    use feature_gate::UnstableFeatures;
-    use parse::token;
+    use crate::ast::{Ident, CrateConfig};
+    use crate::symbol::Symbol;
+    use crate::source_map::SourceMap;
+    use crate::errors;
+    use crate::feature_gate::UnstableFeatures;
+    use crate::parse::token;
+    use crate::diagnostics::plugin::ErrorMap;
+    use crate::with_globals;
     use std::io;
     use std::path::PathBuf;
-    use diagnostics::plugin::ErrorMap;
+    use syntax_pos::{BytePos, Span, NO_EXPANSION};
     use rustc_data_structures::fx::FxHashSet;
     use rustc_data_structures::sync::Lock;
-    use with_globals;
+
     fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
         let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
                                                           Some(sm.clone()),
@@ -1943,7 +1946,7 @@ mod tests {
 
     // check that the given reader produces the desired stream
     // of tokens (stop checking after exhausting the expected vec)
-    fn check_tokenization(mut string_reader: StringReader, expected: Vec<token::Token>) {
+    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<token::Token>) {
         for expected_tok in &expected {
             assert_eq!(&string_reader.next_token().tok, expected_tok);
         }
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index d219f29f06c..7699d9eab22 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,7 +1,7 @@
-use print::pprust::token_to_string;
-use parse::lexer::StringReader;
-use parse::{token, PResult};
-use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
+use crate::print::pprust::token_to_string;
+use crate::parse::lexer::StringReader;
+use crate::parse::{token, PResult};
+use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs
index 7da4284c0e4..75862178169 100644
--- a/src/libsyntax/parse/lexer/unicode_chars.rs
+++ b/src/libsyntax/parse/lexer/unicode_chars.rs
@@ -2,7 +2,7 @@
 // http://www.unicode.org/Public/security/10.0.0/confusables.txt
 
 use syntax_pos::{Span, NO_EXPANSION};
-use errors::{Applicability, DiagnosticBuilder};
+use crate::errors::{Applicability, DiagnosticBuilder};
 use super::StringReader;
 
 const UNICODE_ARRAY: &[(char, &str, char)] = &[
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index c7330004d6d..c723d591f2f 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -1,16 +1,18 @@
 //! The main parser interface
 
+use crate::ast::{self, CrateConfig, NodeId};
+use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
+use crate::feature_gate::UnstableFeatures;
+use crate::parse::parser::Parser;
+use crate::symbol::Symbol;
+use crate::tokenstream::{TokenStream, TokenTree};
+use crate::diagnostics::plugin::ErrorMap;
+
 use rustc_data_structures::sync::{Lrc, Lock};
-use ast::{self, CrateConfig, NodeId};
-use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
-use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
-use feature_gate::UnstableFeatures;
-use parse::parser::Parser;
-use symbol::Symbol;
-use tokenstream::{TokenStream, TokenTree};
-use diagnostics::plugin::ErrorMap;
+use log::debug;
 
 use rustc_data_structures::fx::FxHashSet;
 use std::borrow::Cow;
@@ -125,12 +127,12 @@ pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess)
 }
 
 pub fn parse_crate_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                       -> PResult<ast::Crate> {
+                                       -> PResult<'_, ast::Crate> {
     new_parser_from_source_str(sess, name, source).parse_crate_mod()
 }
 
 pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                             -> PResult<Vec<ast::Attribute>> {
+                                             -> PResult<'_, Vec<ast::Attribute>> {
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
@@ -142,14 +144,14 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
 
 /// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-                                      -> Parser {
+                                      -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
 }
 
 /// Create a new parser from a source string. Returns any buffered errors from lexing the initial
 /// token stream.
 pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-    -> Result<Parser, Vec<Diagnostic>>
+    -> Result<Parser<'_>, Vec<Diagnostic>>
 {
     let mut parser = maybe_source_file_to_parser(sess,
                                                  sess.source_map().new_source_file(name, source))?;
@@ -186,7 +188,7 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 }
 
 /// Given a source_file and config, return a parser
-fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
+fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic,
                      maybe_source_file_to_parser(sess, source_file))
 }
@@ -194,7 +196,7 @@ fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Par
 /// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
 /// initial token stream.
 fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
-    -> Result<Parser, Vec<Diagnostic>>
+    -> Result<Parser<'_>, Vec<Diagnostic>>
 {
     let end_pos = source_file.end_pos;
     let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
@@ -208,7 +210,7 @@ fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
+pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
     stream_to_parser(sess, tts.into_iter().collect())
 }
 
@@ -270,7 +272,7 @@ pub fn maybe_file_to_stream(sess: &ParseSess,
 }
 
 /// Given stream and the `ParseSess`, produce a parser
-pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
+pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
     Parser::new(sess, stream, None, true, false)
 }
 
@@ -758,22 +760,22 @@ impl SeqSep {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use crate::ast::{self, Ident, PatKind};
+    use crate::attr::first_attr_value_str_by_name;
+    use crate::ptr::P;
+    use crate::print::pprust::item_to_string;
+    use crate::tokenstream::{DelimSpan, TokenTree};
+    use crate::util::parser_testing::string_to_stream;
+    use crate::util::parser_testing::{string_to_expr, string_to_item};
+    use crate::with_globals;
     use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
-    use ast::{self, Ident, PatKind};
-    use attr::first_attr_value_str_by_name;
-    use ptr::P;
-    use print::pprust::item_to_string;
-    use tokenstream::{DelimSpan, TokenTree};
-    use util::parser_testing::string_to_stream;
-    use util::parser_testing::{string_to_expr, string_to_item};
-    use with_globals;
 
     /// Parses an item.
     ///
     /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
     /// when a syntax error occurred.
     fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                        -> PResult<Option<P<ast::Item>>> {
+                                        -> PResult<'_, Option<P<ast::Item>>> {
         new_parser_from_source_str(sess, name, source).parse_item()
     }
 
@@ -913,20 +915,20 @@ mod tests {
         struct PatIdentVisitor {
             spans: Vec<Span>
         }
-        impl<'a> ::visit::Visitor<'a> for PatIdentVisitor {
+        impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor {
             fn visit_pat(&mut self, p: &'a ast::Pat) {
                 match p.node {
                     PatKind::Ident(_ , ref spannedident, _) => {
                         self.spans.push(spannedident.span.clone());
                     }
                     _ => {
-                        ::visit::walk_pat(self, p);
+                        crate::visit::walk_pat(self, p);
                     }
                 }
             }
         }
         let mut v = PatIdentVisitor { spans: Vec::new() };
-        ::visit::walk_item(&mut v, &item);
+        crate::visit::walk_item(&mut v, &item);
         return v.spans;
     }
 
@@ -1007,7 +1009,7 @@ mod tests {
     fn ttdelim_span() {
         fn parse_expr_from_source_str(
             name: FileName, source: String, sess: &ParseSess
-        ) -> PResult<P<ast::Expr>> {
+        ) -> PResult<'_, P<ast::Expr>> {
             new_parser_from_source_str(sess, name, source).parse_expr()
         }
 
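
The return-type edits in this file (Parser -> Parser<'_>, PResult<T> -> PResult<'_, T>) are the same idiom-lint fix applied to return position: PResult is a lifetime-parameterized alias, and on 2018 the elided lifetime has to be written out rather than left invisible. A self-contained sketch with simplified stand-ins for ParseSess, DiagnosticBuilder, and PResult:

struct ParseSess {
    file_name: String,
}

struct DiagnosticBuilder<'a> {
    sess: &'a ParseSess,
    msg: String,
}

type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;

// 2015 accepted `-> PResult<u32>` here; the 2018 idiom makes the elision
// visible, and `'_` resolves to the lifetime of the `sess` borrow.
fn parse_magic_number(sess: &ParseSess, src: String) -> PResult<'_, u32> {
    src.trim().parse().map_err(|_| DiagnosticBuilder {
        sess,
        msg: format!("not a number in {}", sess.file_name),
    })
}

fn main() {
    let sess = ParseSess { file_name: "lib.rs".to_string() };
    assert_eq!(parse_magic_number(&sess, "42".to_string()).unwrap(), 42);
}
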
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index bbd1770e9c6..cacdab980fa 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -1,53 +1,55 @@
+use crate::ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
+use crate::ast::{GenericBound, TraitBoundModifier};
+use crate::ast::Unsafety;
+use crate::ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
+use crate::ast::Block;
+use crate::ast::{BlockCheckMode, CaptureBy, Movability};
+use crate::ast::{Constness, Crate};
+use crate::ast::Defaultness;
+use crate::ast::EnumDef;
+use crate::ast::{Expr, ExprKind, RangeLimits};
+use crate::ast::{Field, FnDecl, FnHeader};
+use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
+use crate::ast::{GenericParam, GenericParamKind};
+use crate::ast::GenericArg;
+use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
+use crate::ast::{Label, Lifetime, Lit, LitKind};
+use crate::ast::Local;
+use crate::ast::MacStmtStyle;
+use crate::ast::{Mac, Mac_, MacDelimiter};
+use crate::ast::{MutTy, Mutability};
+use crate::ast::{Pat, PatKind, PathSegment};
+use crate::ast::{PolyTraitRef, QSelf};
+use crate::ast::{Stmt, StmtKind};
+use crate::ast::{VariantData, StructField};
+use crate::ast::StrStyle;
+use crate::ast::SelfKind;
+use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax};
+use crate::ast::{Ty, TyKind, TypeBinding, GenericBounds};
+use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
+use crate::ast::{UseTree, UseTreeKind};
+use crate::ast::{BinOpKind, UnOp};
+use crate::ast::{RangeEnd, RangeSyntax};
+use crate::{ast, attr};
+use crate::ext::base::DummyResult;
+use crate::source_map::{self, SourceMap, Spanned, respan};
+use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
+use crate::parse::{self, SeqSep, classify, token};
+use crate::parse::lexer::TokenAndSpan;
+use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use crate::parse::token::DelimToken;
+use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
+use crate::util::parser::{AssocOp, Fixity};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::parse::PResult;
+use crate::ThinVec;
+use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
+use crate::symbol::{Symbol, keywords};
+
 use rustc_target::spec::abi::{self, Abi};
-use ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
-use ast::{GenericBound, TraitBoundModifier};
-use ast::Unsafety;
-use ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
-use ast::Block;
-use ast::{BlockCheckMode, CaptureBy, Movability};
-use ast::{Constness, Crate};
-use ast::Defaultness;
-use ast::EnumDef;
-use ast::{Expr, ExprKind, RangeLimits};
-use ast::{Field, FnDecl, FnHeader};
-use ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
-use ast::{GenericParam, GenericParamKind};
-use ast::GenericArg;
-use ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
-use ast::{Label, Lifetime, Lit, LitKind};
-use ast::Local;
-use ast::MacStmtStyle;
-use ast::{Mac, Mac_, MacDelimiter};
-use ast::{MutTy, Mutability};
-use ast::{Pat, PatKind, PathSegment};
-use ast::{PolyTraitRef, QSelf};
-use ast::{Stmt, StmtKind};
-use ast::{VariantData, StructField};
-use ast::StrStyle;
-use ast::SelfKind;
-use ast::{TraitItem, TraitRef, TraitObjectSyntax};
-use ast::{Ty, TyKind, TypeBinding, GenericBounds};
-use ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
-use ast::{UseTree, UseTreeKind};
-use ast::{BinOpKind, UnOp};
-use ast::{RangeEnd, RangeSyntax};
-use {ast, attr};
-use ext::base::DummyResult;
-use source_map::{self, SourceMap, Spanned, respan};
 use syntax_pos::{self, Span, MultiSpan, BytePos, FileName};
-use errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
-use parse::{self, SeqSep, classify, token};
-use parse::lexer::TokenAndSpan;
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use parse::token::DelimToken;
-use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
-use util::parser::{AssocOp, Fixity};
-use print::pprust;
-use ptr::P;
-use parse::PResult;
-use ThinVec;
-use tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
-use symbol::{Symbol, keywords};
+use log::{debug, trace};
 
 use std::borrow::Cow;
 use std::cmp;
@@ -64,7 +66,7 @@ pub enum AliasKind {
     Existential(GenericBounds),
 }
 
-bitflags! {
+bitflags::bitflags! {
     struct Restrictions: u8 {
         const STMT_EXPR         = 1 << 0;
         const NO_STRUCT_LITERAL = 1 << 1;
@@ -453,7 +455,7 @@ pub enum Error {
 impl Error {
     fn span_err<S: Into<MultiSpan>>(self,
                                         sp: S,
-                                        handler: &errors::Handler) -> DiagnosticBuilder {
+                                        handler: &errors::Handler) -> DiagnosticBuilder<'_> {
         match self {
             Error::FileNotFoundForModule { ref mod_name,
                                            ref default_path,
@@ -1313,7 +1315,7 @@ impl<'a> Parser<'a> {
         self.sess.span_diagnostic.span_bug(sp, m)
     }
 
-    fn cancel(&self, err: &mut DiagnosticBuilder) {
+    fn cancel(&self, err: &mut DiagnosticBuilder<'_>) {
         self.sess.span_diagnostic.cancel(err)
     }
 
@@ -1721,7 +1723,7 @@ impl<'a> Parser<'a> {
         match ty.node {
             TyKind::Rptr(ref lifetime, ref mut_ty) => {
                 let sum_with_parens = pprust::to_string(|s| {
-                    use print::pprust::PrintState;
+                    use crate::print::pprust::PrintState;
 
                     s.s.word("&")?;
                     s.print_opt_lifetime(lifetime)?;
@@ -3063,7 +3065,7 @@ impl<'a> Parser<'a> {
                             None => continue,
                         };
                         let sugg = pprust::to_string(|s| {
-                            use print::pprust::PrintState;
+                            use crate::print::pprust::PrintState;
                             s.popen()?;
                             s.print_expr(&e)?;
                             s.s.word( ".")?;
@@ -5220,7 +5222,7 @@ impl<'a> Parser<'a> {
                         stmt_span = stmt_span.with_hi(self.prev_span.hi());
                     }
                     let sugg = pprust::to_string(|s| {
-                        use print::pprust::{PrintState, INDENT_UNIT};
+                        use crate::print::pprust::{PrintState, INDENT_UNIT};
                         s.ibox(INDENT_UNIT)?;
                         s.bopen()?;
                         s.print_stmt(&stmt)?;
@@ -7050,7 +7052,7 @@ impl<'a> Parser<'a> {
     /// Parse a `mod <foo> { ... }` or `mod <foo>;` item
     fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
         let (in_cfg, outer_attrs) = {
-            let mut strip_unconfigured = ::config::StripUnconfigured {
+            let mut strip_unconfigured = crate::config::StripUnconfigured {
                 sess: self.sess,
                 features: None, // don't perform gated feature checking
             };
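
The `bitflags!` -> `bitflags::bitflags!` edit above is the macro analogue of the path changes: on 2018 a macro from another crate can be invoked through its path (or imported with `use`), so the invocation no longer depends on a `#[macro_use] extern crate bitflags;` at the crate root. A sketch assuming bitflags 1.x as a dependency, mirroring the Restrictions flags from the diff:

// Path-qualified invocation; no #[macro_use] needed on the 2018 edition.
bitflags::bitflags! {
    struct Restrictions: u8 {
        const STMT_EXPR         = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
    }
}

fn main() {
    let r = Restrictions::STMT_EXPR | Restrictions::NO_STRUCT_LITERAL;
    assert!(r.contains(Restrictions::STMT_EXPR));
    assert_eq!(r.bits(), 0b11);
}
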
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 5181bb8f34e..3b1fa5ea01f 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -1,22 +1,26 @@
-pub use self::BinOpToken::*;
-pub use self::Nonterminal::*;
-pub use self::DelimToken::*;
-pub use self::Lit::*;
-pub use self::Token::*;
-
-use ast::{self};
-use parse::ParseSess;
-use print::pprust;
-use ptr::P;
+pub use BinOpToken::*;
+pub use Nonterminal::*;
+pub use DelimToken::*;
+pub use Lit::*;
+pub use Token::*;
+
+use crate::ast::{self};
+use crate::parse::ParseSess;
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::symbol::keywords;
+use crate::syntax::parse::parse_stream_from_source_str;
+use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
+
 use serialize::{Decodable, Decoder, Encodable, Encoder};
-use symbol::keywords;
-use syntax::parse::parse_stream_from_source_str;
-use syntax_pos::{self, Span, FileName};
 use syntax_pos::symbol::{self, Symbol};
-use tokenstream::{self, DelimSpan, TokenStream, TokenTree};
+use syntax_pos::{self, Span, FileName};
+use log::info;
 
 use std::{cmp, fmt};
 use std::mem;
+#[cfg(target_arch = "x86_64")]
+use rustc_data_structures::static_assert;
 use rustc_data_structures::sync::{Lrc, Lock};
 
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@@ -683,7 +687,7 @@ impl PartialEq for Nonterminal {
 }
 
 impl fmt::Debug for Nonterminal {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             NtItem(..) => f.pad("NtItem(..)"),
             NtBlock(..) => f.pad("NtBlock(..)"),
@@ -729,7 +733,7 @@ impl PartialEq for LazyTokenStream {
 }
 
 impl fmt::Debug for LazyTokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Debug::fmt(&self.clone().0.into_inner(), f)
     }
 }
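
Finally, the `pub use self::Token::*;` -> `pub use Token::*;` lines at the top of token.rs rely on 2018 uniform paths: a `use` declaration can now begin with the name of an item defined in the current module, so the `self::` prefix becomes redundant. A small sketch with a hypothetical enum, not the real token types:

pub enum BinOpToken {
    Plus,
    Minus,
}

// 2015 needed `pub use self::BinOpToken::*;`; on 2018 the bare path resolves
// to the enum defined just above.
pub use BinOpToken::*;

pub fn symbol(op: &BinOpToken) -> &'static str {
    match op {
        Plus => "+",
        Minus => "-",
    }
}
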