author     Alexander Regueiro <alexreg@me.com>   2019-09-06 03:56:45 +0100
committer  Alexander Regueiro <alexreg@me.com>   2019-09-07 16:29:04 +0100
commit     49d2fd1725510fd3bf6f2937e178b1aa055ddb02 (patch)
tree       a04455dea49b29d2c981573bd920116f0418024b /src/libsyntax/parse
parent     ef54f57c5b9d894a38179d09b00610c1b337b086 (diff)
Aggregation of cosmetic changes made during work on REPL PRs: libsyntax
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/attr.rs            38
-rw-r--r--  src/libsyntax/parse/diagnostics.rs     76
-rw-r--r--  src/libsyntax/parse/lexer/tests.rs     17
-rw-r--r--  src/libsyntax/parse/mod.rs             53
-rw-r--r--  src/libsyntax/parse/parser.rs          50
-rw-r--r--  src/libsyntax/parse/parser/expr.rs    141
-rw-r--r--  src/libsyntax/parse/parser/item.rs    115
-rw-r--r--  src/libsyntax/parse/parser/module.rs    6
-rw-r--r--  src/libsyntax/parse/parser/pat.rs       4
-rw-r--r--  src/libsyntax/parse/parser/stmt.rs     64
-rw-r--r--  src/libsyntax/parse/tests.rs            8
11 files changed, 286 insertions, 286 deletions
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index d9c4baad49d..9aa1ec0b14f 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -26,7 +26,7 @@ impl<'a> Parser<'a> {
         Ok(attrs)
     }
 
-    /// Parse attributes that appear before an item
+    /// Parses attributes that appear before an item.
     crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
         let mut attrs: Vec<ast::Attribute> = Vec::new();
         let mut just_parsed_doc_comment = false;
@@ -69,10 +69,10 @@ impl<'a> Parser<'a> {
         Ok(attrs)
     }
 
-    /// Matches `attribute = # ! [ meta_item ]`
+    /// Matches `attribute = # ! [ meta_item ]`.
     ///
-    /// If permit_inner is true, then a leading `!` indicates an inner
-    /// attribute
+    /// If `permit_inner` is `true`, then a leading `!` indicates an inner
+    /// attribute.
     pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult<'a, ast::Attribute> {
         debug!("parse_attribute: permit_inner={:?} self.token={:?}",
                permit_inner,
@@ -167,14 +167,14 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse an inner part of attribute - path and following tokens.
+    /// Parses an inner part of an attribute (the path and following tokens).
     /// The tokens must be either a delimited token stream, or empty token stream,
     /// or the "legacy" key-value form.
-    /// PATH `(` TOKEN_STREAM `)`
-    /// PATH `[` TOKEN_STREAM `]`
-    /// PATH `{` TOKEN_STREAM `}`
-    /// PATH
-    /// PATH `=` TOKEN_TREE
+    ///     PATH `(` TOKEN_STREAM `)`
+    ///     PATH `[` TOKEN_STREAM `]`
+    ///     PATH `{` TOKEN_STREAM `}`
+    ///     PATH
+    ///     PATH `=` TOKEN_TREE
     /// The delimiters or `=` are still put into the resulting token stream.
     pub fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
         let meta = match self.token.kind {
@@ -217,11 +217,11 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse attributes that appear after the opening of an item. These should
+    /// Parses attributes that appear after the opening of an item. These should
     /// be preceded by an exclamation mark, but we accept and warn about one
     /// terminated by a semicolon.
-
-    /// matches inner_attrs*
+    ///
+    /// Matches `inner_attrs*`.
     crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
         let mut attrs: Vec<ast::Attribute> = vec![];
         loop {
@@ -237,7 +237,7 @@ impl<'a> Parser<'a> {
                     attrs.push(attr);
                 }
                 token::DocComment(s) => {
-                    // we need to get the position of this token before we bump.
+                    // We need to get the position of this token before we bump.
                     let attr = attr::mk_sugared_doc_attr(s, self.token.span);
                     if attr.style == ast::AttrStyle::Inner {
                         attrs.push(attr);
@@ -268,10 +268,10 @@ impl<'a> Parser<'a> {
         Ok(lit)
     }
 
-    /// Per RFC#1559, matches the following grammar:
+    /// Matches the following grammar (per RFC 1559):
     ///
-    /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
-    /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
+    ///     meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
+    ///     meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
@@ -303,7 +303,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;
+    /// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
     fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
         match self.parse_unsuffixed_lit() {
             Ok(lit) => {
@@ -324,7 +324,7 @@ impl<'a> Parser<'a> {
         Err(self.diagnostic().struct_span_err(self.token.span, &msg))
     }
 
-    /// matches meta_seq = ( COMMASEP(meta_item_inner) )
+    /// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
     fn parse_meta_seq(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> {
         self.parse_seq_to_end(&token::CloseDelim(token::Paren),
                               SeqSep::trailing_allowed(token::Comma),
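
The attribute and meta-item grammar documented in the attr.rs hunks above corresponds to ordinary Rust attribute syntax. A minimal illustrative snippet (standard Rust forms only; not part of this commit):

    #![allow(dead_code)]                  // inner attribute: `# ! [ meta_item ]`
    #[derive(Clone, Debug)]               // IDENT followed by `(` meta_item_inner `)`
    #[cfg(target_pointer_width = "64")]   // nested meta item with an `=` UNSUFFIXED_LIT
    struct Example {
        #[doc = "a field"]                // "legacy" key-value form: PATH `=` TOKEN_TREE
        field: u32,
    }
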
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index d050d4f4ce7..2890a8e721e 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -240,7 +240,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
-            // This might be a sign we need a connect method on Iterator.
+            // This might be a sign we need a connect method on `Iterator`.
             let b = i.next()
                      .map_or(String::new(), |t| t.to_string());
             i.enumerate().fold(b, |mut b, (i, a)| {
@@ -301,7 +301,7 @@ impl<'a> Parser<'a> {
             );
         }
         let sp = if self.token == token::Eof {
-            // This is EOF, don't want to point at the following char, but rather the last token
+            // This is EOF; we don't want to point at the following char, but rather the last token.
             self.prev_span
         } else {
             label_sp
@@ -317,9 +317,9 @@ impl<'a> Parser<'a> {
         }
 
         let is_semi_suggestable = expected.iter().any(|t| match t {
-            TokenType::Token(token::Semi) => true, // we expect a `;` here
+            TokenType::Token(token::Semi) => true, // We expect a `;` here.
             _ => false,
-        }) && ( // a `;` would be expected before the current keyword
+        }) && ( // A `;` would be expected before the current keyword.
             self.token.is_keyword(kw::Break) ||
             self.token.is_keyword(kw::Continue) ||
             self.token.is_keyword(kw::For) ||
@@ -541,16 +541,16 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Produce an error if comparison operators are chained (RFC #558).
-    /// We only need to check lhs, not rhs, because all comparison ops
-    /// have same precedence and are left-associative
+    /// Produces an error if comparison operators are chained (RFC #558).
+    /// We only need to check the LHS, not the RHS, because all comparison ops
+    /// have the same precedence and are left-associative.
     crate fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) -> PResult<'a, ()> {
         debug_assert!(outer_op.is_comparison(),
                       "check_no_chained_comparison: {:?} is not comparison",
                       outer_op);
         match lhs.node {
             ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
-                // respan to include both operators
+                // Respan to include both operators.
                 let op_span = op.span.to(self.token.span);
                 let mut err = self.struct_span_err(
                     op_span,
@@ -691,9 +691,9 @@ impl<'a> Parser<'a> {
         Ok(())
     }
 
-    /// Try to recover from associated item paths like `[T]::AssocItem`/`(T, U)::AssocItem`.
-    /// Attempt to convert the base expression/pattern/type into a type, parse the `::AssocItem`
-    /// tail, and combine them into a `<Ty>::AssocItem` expression/pattern/type.
+    /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
+    /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
+    /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
     crate fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
         &mut self,
         base: P<T>,
@@ -708,8 +708,8 @@ impl<'a> Parser<'a> {
         Ok(base)
     }
 
-    /// Given an already parsed `Ty` parse the `::AssocItem` tail and
-    /// combine them into a `<Ty>::AssocItem` expression/pattern/type.
+    /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
+    /// combines them into a `<Ty>::AssocItem` expression/pattern/type.
     crate fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
         &mut self,
         ty_span: Span,
@@ -730,7 +730,7 @@ impl<'a> Parser<'a> {
         self.diagnostic()
             .struct_span_err(path.span, "missing angle brackets in associated item path")
             .span_suggestion(
-                // this is a best-effort recovery
+                // This is a best-effort recovery.
                 path.span,
                 "try",
                 format!("<{}>::{}", ty_str, path),
@@ -738,7 +738,7 @@ impl<'a> Parser<'a> {
             )
             .emit();
 
-        let path_span = ty_span.shrink_to_hi(); // use an empty path since `position` == 0
+        let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
         Ok(P(T::recovered(
             Some(QSelf {
                 ty,
@@ -761,8 +761,8 @@ impl<'a> Parser<'a> {
             if !items.is_empty() {
                 let previous_item = &items[items.len() - 1];
                 let previous_item_kind_name = match previous_item.node {
-                    // say "braced struct" because tuple-structs and
-                    // braceless-empty-struct declarations do take a semicolon
+                    // Say "braced struct" because tuple-structs and
+                    // braceless-empty-struct declarations do take a semicolon.
                     ItemKind::Struct(..) => Some("braced struct"),
                     ItemKind::Enum(..) => Some("enum"),
                     ItemKind::Trait(..) => Some("trait"),
@@ -783,7 +783,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Create a `DiagnosticBuilder` for an unexpected token `t` and try to recover if it is a
+    /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
     /// closing delimiter.
     pub fn unexpected_try_recover(
         &mut self,
@@ -841,7 +841,7 @@ impl<'a> Parser<'a> {
         extern_sp: Span,
     ) -> PResult<'a, ()> {
         if self.token != token::Semi {
-            // this might be an incorrect fn definition (#62109)
+            // This might be an incorrect fn definition (#62109).
             let parser_snapshot = self.clone();
             match self.parse_inner_attrs_and_block() {
                 Ok((_, body)) => {
@@ -871,7 +871,7 @@ impl<'a> Parser<'a> {
         Ok(())
     }
 
-    /// Consume alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
+    /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
     /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
     crate fn parse_incorrect_await_syntax(
         &mut self,
@@ -924,7 +924,7 @@ impl<'a> Parser<'a> {
         sp
     }
 
-    /// If encountering `future.await()`, consume and emit error.
+    /// If encountering `future.await()`, consumes and emits an error.
     crate fn recover_from_await_method_call(&mut self) {
         if self.token == token::OpenDelim(token::Paren) &&
             self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
@@ -944,7 +944,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Recover a situation like `for ( $pat in $expr )`
+    /// Recovers a situation like `for ( $pat in $expr )`
     /// and suggest writing `for $pat in $expr` instead.
     ///
     /// This should be called before parsing the `$block`.
@@ -1010,7 +1010,7 @@ impl<'a> Parser<'a> {
             Ok(x) => x,
             Err(mut err) => {
                 err.emit();
-                // recover from parse error
+                // Recover from parse error.
                 self.consume_block(delim);
                 self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new())
             }
@@ -1023,7 +1023,7 @@ impl<'a> Parser<'a> {
         mut err: DiagnosticBuilder<'a>,
     ) -> PResult<'a, bool> {
         let mut pos = None;
-        // we want to use the last closing delim that would apply
+        // We want to use the last closing delim that would apply.
         for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
             if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
                 && Some(self.token.span) > unmatched.unclosed_span
@@ -1041,7 +1041,7 @@ impl<'a> Parser<'a> {
                 let unmatched = self.unclosed_delims.remove(pos);
                 let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
 
-                 // We want to suggest the inclusion of the closing delimiter where it makes
+                // We want to suggest the inclusion of the closing delimiter where it makes
                 // the most sense, which is immediately after the last token:
                 //
                 //  {foo(bar {}}
@@ -1067,7 +1067,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Recover from `pub` keyword in places where it seems _reasonable_ but isn't valid.
+    /// Recovers from `pub` keyword in places where it seems _reasonable_ but isn't valid.
     crate fn eat_bad_pub(&mut self) {
         if self.token.is_keyword(kw::Pub) {
             match self.parse_visibility(false) {
@@ -1082,21 +1082,21 @@ impl<'a> Parser<'a> {
         }
     }
 
-    // Eat tokens until we can be relatively sure we reached the end of the
-    // statement. This is something of a best-effort heuristic.
-    //
-    // We terminate when we find an unmatched `}` (without consuming it).
-    crate fn recover_stmt(&mut self) {
+    /// Eats tokens until we can be relatively sure we reached the end of the
+    /// statement. This is something of a best-effort heuristic.
+    ///
+    /// We terminate when we find an unmatched `}` (without consuming it).
+    pub fn recover_stmt(&mut self) {
         self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
     }
 
-    // If `break_on_semi` is `Break`, then we will stop consuming tokens after
-    // finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
-    // approximate - it can mean we break too early due to macros, but that
-    // should only lead to sub-optimal recovery, not inaccurate parsing).
-    //
-    // If `break_on_block` is `Break`, then we will stop consuming tokens
-    // after finding (and consuming) a brace-delimited block.
+    /// If `break_on_semi` is `Break`, then we will stop consuming tokens after
+    /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
+    /// approximate -- it can mean we break too early due to macros, but that
+    /// should only lead to sub-optimal recovery, not inaccurate parsing).
+    ///
+    /// If `break_on_block` is `Break`, then we will stop consuming tokens
+    /// after finding (and consuming) a brace-delimited block.
     crate fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) {
         let mut brace_depth = 0;
         let mut bracket_depth = 0;
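
The diagnostics.rs recovery helpers above (chained comparisons, bad qpaths, statement recovery) act on malformed source. A small sketch, written here only for illustration, of the accepted forms they steer users toward:

    fn main() {
        // `maybe_recover_from_bad_qpath` rewrites `[T]::AssocItem` into `<[T]>::AssocItem`;
        // the accepted, fully qualified form looks like this:
        let len = <[u8]>::len(&[1, 2, 3]);

        // `check_no_chained_comparison` rejects `a < b < c` (RFC 558); spell out the intent instead:
        let (a, b, c) = (1, 2, 3);
        let ordered = a < b && b < c;

        println!("{} {}", len, ordered);
    }
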
diff --git a/src/libsyntax/parse/lexer/tests.rs b/src/libsyntax/parse/lexer/tests.rs
index 652ae95c853..d965bf28ee7 100644
--- a/src/libsyntax/parse/lexer/tests.rs
+++ b/src/libsyntax/parse/lexer/tests.rs
@@ -4,9 +4,10 @@ use crate::symbol::Symbol;
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::parse::token;
 use crate::with_default_globals;
+
+use errors::{Handler, emitter::EmitterWriter};
 use std::io;
 use std::path::PathBuf;
-use errors::{Handler, emitter::EmitterWriter};
 use syntax_pos::{BytePos, Span};
 
 fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
@@ -21,7 +22,7 @@ fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
     ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
 }
 
-// open a string reader for the given string
+// Creates a string reader for the given string.
 fn setup<'a>(sm: &SourceMap,
                 sess: &'a ParseSess,
                 teststr: String)
@@ -38,7 +39,7 @@ fn t1() {
         let mut string_reader = setup(
             &sm,
             &sh,
-            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
+            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_owned(),
         );
         assert_eq!(string_reader.next_token(), token::Comment);
         assert_eq!(string_reader.next_token(), token::Whitespace);
@@ -50,7 +51,7 @@ fn t1() {
         assert_eq!(tok1.kind, tok2.kind);
         assert_eq!(tok1.span, tok2.span);
         assert_eq!(string_reader.next_token(), token::Whitespace);
-        // read another token:
+        // Read another token.
         let tok3 = string_reader.next_token();
         assert_eq!(string_reader.pos.clone(), BytePos(28));
         let tok4 = Token::new(
@@ -65,15 +66,15 @@ fn t1() {
     })
 }
 
-// check that the given reader produces the desired stream
-// of tokens (stop checking after exhausting the expected vec)
+// Checks that the given reader produces the desired stream
+// of tokens (stop checking after exhausting `expected`).
 fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
     for expected_tok in &expected {
         assert_eq!(&string_reader.next_token(), expected_tok);
     }
 }
 
-// make the identifier by looking up the string in the interner
+// Makes the identifier by looking up the string in the interner.
 fn mk_ident(id: &str) -> TokenKind {
     token::Ident(Symbol::intern(id), false)
 }
@@ -201,7 +202,7 @@ fn literal_suffixes() {
                     setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                     mk_lit(token::$tok_type, $tok_contents, Some("suffix")),
                 );
-                // with a whitespace separator:
+                // with a whitespace separator
                 assert_eq!(
                     setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                     mk_lit(token::$tok_type, $tok_contents, None),
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index b1af4806e2d..aa57c3954e3 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -13,12 +13,12 @@ use crate::print::pprust;
 use crate::symbol::Symbol;
 
 use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
+use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use rustc_data_structures::sync::{Lrc, Lock, Once};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
 use syntax_pos::edition::Edition;
 use syntax_pos::hygiene::ExpnId;
 
-use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use std::borrow::Cow;
 use std::path::{Path, PathBuf};
 use std::str;
@@ -81,25 +81,27 @@ pub struct ParseSess {
 impl ParseSess {
     pub fn new(file_path_mapping: FilePathMapping) -> Self {
         let cm = Lrc::new(SourceMap::new(file_path_mapping));
-        let handler = Handler::with_tty_emitter(ColorConfig::Auto,
-                                                true,
-                                                None,
-                                                Some(cm.clone()));
+        let handler = Handler::with_tty_emitter(
+            ColorConfig::Auto,
+            true,
+            None,
+            Some(cm.clone()),
+        );
         ParseSess::with_span_handler(handler, cm)
     }
 
-    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> ParseSess {
-        ParseSess {
+    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> Self {
+        Self {
             span_diagnostic: handler,
             unstable_features: UnstableFeatures::from_environment(),
             config: FxHashSet::default(),
+            edition: ExpnId::root().expn_data().edition,
             missing_fragment_specifiers: Lock::new(FxHashSet::default()),
             raw_identifier_spans: Lock::new(Vec::new()),
             registered_diagnostics: Lock::new(ErrorMap::new()),
             included_mod_stack: Lock::new(vec![]),
             source_map,
             buffered_lints: Lock::new(vec![]),
-            edition: ExpnId::root().expn_data().edition,
             ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
             injected_crate_name: Once::new(),
             gated_spans: GatedSpans::default(),
@@ -155,17 +157,17 @@ pub struct Directory<'a> {
 #[derive(Copy, Clone)]
 pub enum DirectoryOwnership {
     Owned {
-        // None if `mod.rs`, `Some("foo")` if we're in `foo.rs`
+        // None if `mod.rs`, `Some("foo")` if we're in `foo.rs`.
         relative: Option<ast::Ident>,
     },
     UnownedViaBlock,
     UnownedViaMod(bool /* legacy warnings? */),
 }
 
-// a bunch of utility functions of the form parse_<thing>_from_<source>
+// A bunch of utility functions of the form `parse_<thing>_from_<source>`
 // where <thing> includes crate, expr, item, stmt, tts, and one that
 // uses a HOF to parse anything, and <source> includes file and
-// source_str.
+// `source_str`.
 
 pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> {
     let mut parser = new_parser_from_file(sess, input);
@@ -219,14 +221,13 @@ pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source
     Ok(parser)
 }
 
-/// Creates a new parser, handling errors as appropriate
-/// if the file doesn't exist
+/// Creates a new parser, handling errors as appropriate if the file doesn't exist.
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
     source_file_to_parser(sess, file_to_source_file(sess, path, None))
 }
 
-/// Creates a new parser, returning buffered diagnostics if the file doesn't
-/// exist or from lexing the initial token stream.
+/// Creates a new parser, returning buffered diagnostics if the file doesn't exist,
+/// or from lexing the initial token stream.
 pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path)
     -> Result<Parser<'a>, Vec<Diagnostic>> {
     let file = try_file_to_source_file(sess, path, None).map_err(|db| vec![db])?;
@@ -234,8 +235,8 @@ pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path)
 }
 
 /// Given a session, a crate config, a path, and a span, add
-/// the file at the given path to the source_map, and return a parser.
-/// On an error, use the given span as the source of the problem.
+/// the file at the given path to the `source_map`, and returns a parser.
+/// On an error, uses the given span as the source of the problem.
 pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
                                     path: &Path,
                                     directory_ownership: DirectoryOwnership,
@@ -247,13 +248,13 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
     p
 }
 
-/// Given a source_file and config, return a parser
+/// Given a `source_file` and config, returns a parser.
 fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic,
                      maybe_source_file_to_parser(sess, source_file))
 }
 
-/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
+/// Given a `source_file` and config, returns a parser. Returns any buffered errors from lexing the
 /// initial token stream.
 fn maybe_source_file_to_parser(
     sess: &ParseSess,
@@ -270,14 +271,14 @@ fn maybe_source_file_to_parser(
     Ok(parser)
 }
 
-// must preserve old name for now, because quote! from the *existing*
-// compiler expands into it
+// Must preserve old name for now, because `quote!` from the *existing*
+// compiler expands into it.
 pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
     stream_to_parser(sess, tts.into_iter().collect(), crate::MACRO_ARGUMENTS)
 }
 
 
-// base abstractions
+// Base abstractions
 
 /// Given a session and a path and an optional span (for error reporting),
 /// add the path to the session's source_map and return the new source_file or
@@ -296,7 +297,7 @@ fn try_file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a session and a path and an optional span (for error reporting),
-/// add the path to the session's `source_map` and return the new `source_file`.
+/// adds the path to the session's `source_map` and returns the new `source_file`.
 fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                    -> Lrc<SourceFile> {
     match try_file_to_source_file(sess, path, spanopt) {
@@ -308,7 +309,7 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     }
 }
 
-/// Given a source_file, produces a sequence of token trees.
+/// Given a `source_file`, produces a sequence of token trees.
 pub fn source_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
@@ -352,7 +353,7 @@ pub fn maybe_file_to_stream(
     }
 }
 
-/// Given stream and the `ParseSess`, produces a parser.
+/// Given a stream and the `ParseSess`, produces a parser.
 pub fn stream_to_parser<'a>(
     sess: &'a ParseSess,
     stream: TokenStream,
@@ -361,7 +362,7 @@ pub fn stream_to_parser<'a>(
     Parser::new(sess, stream, None, true, false, subparser_name)
 }
 
-/// Given stream, the `ParseSess` and the base directory, produces a parser.
+/// Given a stream, the `ParseSess` and the base directory, produces a parser.
 ///
 /// Use this function when you are creating a parser from the token stream
 /// and also care about the current working directory of the parser (e.g.,
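
The `parse_<thing>_from_<source>` utilities and the `ParseSess` constructor touched above are the usual entry points into the parser. A hedged sketch of how they fit together, using only APIs visible in this diff (the `lib.rs` path is hypothetical, and the `syntax` crate paths are the rustc-internal ones):

    use std::path::Path;
    use syntax::parse::{self, ParseSess};
    use syntax::source_map::FilePathMapping;

    fn parse_a_crate() {
        syntax::with_default_globals(|| {
            // `ParseSess::new` builds a TTY-emitting `Handler` over a fresh `SourceMap`.
            let sess = ParseSess::new(FilePathMapping::empty());
            // `parse_crate_from_file` is one of the `parse_<thing>_from_<source>` helpers.
            match parse::parse_crate_from_file(Path::new("lib.rs"), &sess) {
                Ok(krate) => println!("parsed {} top-level items", krate.module.items.len()),
                Err(mut err) => err.emit(),
            }
        });
    }
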
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index ab5462baaf7..fcaf5065dac 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -10,22 +10,22 @@ pub use path::PathStyle;
 mod stmt;
 mod generics;
 
-use crate::ast::{self, AttrStyle, Attribute, Param, BindingMode, StrStyle, SelfKind};
-use crate::ast::{FnDecl, Ident, IsAsync, MacDelimiter, Mutability, TyKind};
-use crate::ast::{Visibility, VisibilityKind, Unsafety, CrateSugar};
-use crate::source_map::{self, respan};
-use crate::parse::{SeqSep, literal, token};
+use crate::ast::{
+    self, DUMMY_NODE_ID, AttrStyle, Attribute, BindingMode, CrateSugar, FnDecl, Ident,
+    IsAsync, MacDelimiter, Mutability, Param, StrStyle, SelfKind, TyKind, Visibility,
+    VisibilityKind, Unsafety,
+};
+use crate::parse::{ParseSess, PResult, Directory, DirectoryOwnership, SeqSep, literal, token};
+use crate::parse::diagnostics::{Error, dummy_arg};
 use crate::parse::lexer::UnmatchedBrace;
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::token::{Token, TokenKind, DelimToken};
-use crate::parse::{ParseSess, Directory, DirectoryOwnership};
 use crate::print::pprust;
 use crate::ptr::P;
-use crate::parse::PResult;
-use crate::ThinVec;
-use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
+use crate::source_map::{self, respan};
 use crate::symbol::{kw, sym, Symbol};
-use crate::parse::diagnostics::{Error, dummy_arg};
+use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
+use crate::ThinVec;
 
 use errors::{Applicability, DiagnosticId, FatalError};
 use rustc_target::spec::abi::{self, Abi};
@@ -56,7 +56,7 @@ crate enum BlockMode {
     Ignore,
 }
 
-/// As maybe_whole_expr, but for things other than expressions
+/// Like `maybe_whole_expr`, but for things other than expressions.
 #[macro_export]
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
@@ -116,11 +116,11 @@ pub struct Parser<'a> {
     /// with non-interpolated identifier and lifetime tokens they refer to.
     /// Perhaps the normalized / non-normalized setup can be simplified somehow.
     pub token: Token,
-    /// Span of the current non-normalized token.
+    /// The span of the current non-normalized token.
     meta_var_span: Option<Span>,
-    /// Span of the previous non-normalized token.
+    /// The span of the previous non-normalized token.
     pub prev_span: Span,
-    /// Kind of the previous normalized token (in simplified form).
+    /// The kind of the previous normalized token (in simplified form).
     prev_token_kind: PrevTokenKind,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.
@@ -143,7 +143,7 @@ pub struct Parser<'a> {
     /// See the comments in the `parse_path_segment` function for more details.
     crate unmatched_angle_bracket_count: u32,
     crate max_angle_bracket_count: u32,
-    /// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
+    /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
     /// it gets removed from here. Every entry left at the end gets emitted as an independent
     /// error.
     crate unclosed_delims: Vec<UnmatchedBrace>,
@@ -799,14 +799,14 @@ impl<'a> Parser<'a> {
                             break;
                         }
                         Err(mut e) => {
-                            // Attempt to keep parsing if it was a similar separator
+                            // Attempt to keep parsing if it was a similar separator.
                             if let Some(ref tokens) = t.similar_tokens() {
                                 if tokens.contains(&self.token.kind) {
                                     self.bump();
                                 }
                             }
                             e.emit();
-                            // Attempt to keep parsing if it was an omitted separator
+                            // Attempt to keep parsing if it was an omitted separator.
                             match f(self) {
                                 Ok(t) => {
                                     v.push(t);
@@ -871,7 +871,7 @@ impl<'a> Parser<'a> {
         self.parse_delim_comma_seq(token::Paren, f)
     }
 
-    /// Advance the parser by one token
+    /// Advance the parser by one token.
     pub fn bump(&mut self) {
         if self.prev_token_kind == PrevTokenKind::Eof {
             // Bumping after EOF is a bad sign, usually an infinite loop.
@@ -894,17 +894,17 @@ impl<'a> Parser<'a> {
 
         self.token = self.next_tok();
         self.expected_tokens.clear();
-        // check after each token
+        // Check after each token.
         self.process_potential_macro_variable();
     }
 
-    /// Advance the parser using provided token as a next one. Use this when
+    /// Advances the parser using the provided token as the next one. Use this when
     /// consuming a part of a token. For example a single `<` from `<<`.
     fn bump_with(&mut self, next: TokenKind, span: Span) {
         self.prev_span = self.token.span.with_hi(span.lo());
         // It would be incorrect to record the kind of the current token, but
         // fortunately for tokens currently using `bump_with`, the
-        // prev_token_kind will be of no use anyway.
+        // `prev_token_kind` will be of no use anyway.
         self.prev_token_kind = PrevTokenKind::Other;
         self.token = Token::new(next, span);
         self.expected_tokens.clear();
@@ -937,8 +937,8 @@ impl<'a> Parser<'a> {
     fn parse_asyncness(&mut self) -> IsAsync {
         if self.eat_keyword(kw::Async) {
             IsAsync::Async {
-                closure_id: ast::DUMMY_NODE_ID,
-                return_impl_trait_id: ast::DUMMY_NODE_ID,
+                closure_id: DUMMY_NODE_ID,
+                return_impl_trait_id: DUMMY_NODE_ID,
             }
         } else {
             IsAsync::NotAsync
@@ -1040,7 +1040,7 @@ impl<'a> Parser<'a> {
 
         let span = lo.to(self.token.span);
 
-        Ok(Param { attrs: attrs.into(), id: ast::DUMMY_NODE_ID, pat, span, ty })
+        Ok(Param { attrs: attrs.into(), id: DUMMY_NODE_ID, pat, span, ty })
     }
 
     /// Parses mutability (`mut` or nothing).
@@ -1497,7 +1497,7 @@ impl<'a> Parser<'a> {
                         format!("in {}", path),
                         Applicability::MachineApplicable,
                     )
-                    .emit();  // emit diagnostic, but continue with public visibility
+                    .emit(); // Emit diagnostic, but continue with public visibility.
             }
         }
 
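The parser.rs hunks above touch helpers for leading modifiers and token bumping; the kinds of source they handle, sketched purely for illustration (none of it comes from the patch):

    pub(crate) struct Wrapper;         // `parse_visibility`: `pub`, `pub(crate)`, `pub(in path)`, ...
    pub async fn task() {}             // `parse_asyncness` yields `IsAsync::Async { .. }` with DUMMY_NODE_IDs
    fn generic(_: Vec<Vec<u8>>) {}     // the closing `>>` is split by `bump_with`, one `>` at a time
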
diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs
index e8c8e199fd0..f70c607198f 100644
--- a/src/libsyntax/parse/parser/expr.rs
+++ b/src/libsyntax/parse/parser/expr.rs
@@ -1,26 +1,26 @@
-use super::{Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle};
-use super::{BlockMode, SemiColonMode};
-use super::{SeqSep, TokenExpectType};
+use super::{
+    Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode, SemiColonMode,
+    SeqSep, TokenExpectType,
+};
 use super::pat::{GateOr, PARAM_EXPECTED};
 
+use crate::ast::{
+    self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode,
+    Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind,
+    FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field,
+};
 use crate::maybe_recover_from_interpolated_ty_qpath;
-use crate::ptr::P;
-use crate::ast::{self, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode};
-use crate::ast::{Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm};
-use crate::ast::{Ty, TyKind, FunctionRetTy, Param, FnDecl};
-use crate::ast::{BinOpKind, BinOp, UnOp};
-use crate::ast::{Mac, AnonConst, Field};
-
 use crate::parse::classify;
 use crate::parse::token::{self, Token};
-use crate::parse::diagnostics::{Error};
+use crate::parse::diagnostics::Error;
 use crate::print::pprust;
+use crate::ptr::P;
 use crate::source_map::{self, Span};
 use crate::symbol::{kw, sym};
 use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};
 
-use std::mem;
 use errors::Applicability;
+use std::mem;
 use rustc_data_structures::thin_vec::ThinVec;
 
 /// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression
@@ -51,7 +51,7 @@ macro_rules! maybe_whole_expr {
                         $p.token.span, ExprKind::Block(block, None), ThinVec::new()
                     ));
                 }
-                // N.B: `NtIdent(ident)` is normalized to `Ident` in `fn bump`.
+                // N.B., `NtIdent(ident)` is normalized to `Ident` in `fn bump`.
                 _ => {},
             };
         }
@@ -340,7 +340,7 @@ impl<'a> Parser<'a> {
 
     fn is_at_start_of_range_notation_rhs(&self) -> bool {
         if self.token.can_begin_expr() {
-            // parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
+            // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
             if self.token == token::OpenDelim(token::Brace) {
                 return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
             }
@@ -350,12 +350,12 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse prefix-forms of range notation: `..expr`, `..`, `..=expr`
+    /// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
     fn parse_prefix_range_expr(
         &mut self,
         already_parsed_attrs: Option<ThinVec<Attribute>>
     ) -> PResult<'a, P<Expr>> {
-        // Check for deprecated `...` syntax
+        // Check for deprecated `...` syntax.
         if self.token == token::DotDotDot {
             self.err_dotdotdot_syntax(self.token.span);
         }
@@ -389,7 +389,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(lo.to(hi), r, attrs))
     }
 
-    /// Parse a prefix-unary-operator expr
+    /// Parses a prefix-unary-operator expr.
     fn parse_prefix_expr(
         &mut self,
         already_parsed_attrs: Option<ThinVec<Attribute>>
@@ -549,7 +549,7 @@ impl<'a> Parser<'a> {
                         let expr = mk_expr(self, P(Ty {
                             span: path.span,
                             node: TyKind::Path(None, path),
-                            id: ast::DUMMY_NODE_ID
+                            id: DUMMY_NODE_ID,
                         }));
 
                         let expr_str = self.span_to_snippet(expr.span)
@@ -565,7 +565,7 @@ impl<'a> Parser<'a> {
                                 expr.span,
                                 &format!("try {} the cast value", op_verb),
                                 format!("({})", expr_str),
-                                Applicability::MachineApplicable
+                                Applicability::MachineApplicable,
                             )
                             .emit();
 
@@ -741,7 +741,6 @@ impl<'a> Parser<'a> {
         })
     }
 
-
     /// At the bottom (top?) of the precedence hierarchy,
     /// Parses things like parenthesized exprs, macros, `return`, etc.
     ///
@@ -755,7 +754,7 @@ impl<'a> Parser<'a> {
         // added to the return value after the fact.
         //
         // Therefore, prevent sub-parser from parsing
-        // attributes by giving them a empty "already parsed" list.
+        // attributes by giving them an empty "already-parsed" list.
         let mut attrs = ThinVec::new();
 
         let lo = self.token.span;
@@ -778,7 +777,7 @@ impl<'a> Parser<'a> {
             }
         }
 
-        // Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr().
+        // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`.
         match self.token.kind {
             // This match arm is a special-case of the `_` match arm below and
             // could be removed without changing functionality, but it's faster
@@ -791,8 +790,8 @@ impl<'a> Parser<'a> {
 
                 attrs.extend(self.parse_inner_attributes()?);
 
-                // (e) is parenthesized e
-                // (e,) is a tuple with only one field, e
+                // `(e)` is parenthesized `e`.
+                // `(e,)` is a tuple with only one field, `e`.
                 let mut es = vec![];
                 let mut trailing_comma = false;
                 let mut recovered = false;
@@ -800,7 +799,7 @@ impl<'a> Parser<'a> {
                     es.push(match self.parse_expr() {
                         Ok(es) => es,
                         Err(mut err) => {
-                            // recover from parse error in tuple list
+                            // Recover from parse error in tuple list.
                             match self.token.kind {
                                 token::Ident(name, false)
                                 if name == kw::Underscore && self.look_ahead(1, |t| {
@@ -844,7 +843,7 @@ impl<'a> Parser<'a> {
                 return self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs);
             }
             token::BinOp(token::Or) | token::OrOr => {
-                return self.parse_lambda_expr(attrs);
+                return self.parse_closure(attrs);
             }
             token::OpenDelim(token::Bracket) => {
                 self.bump();
@@ -852,21 +851,21 @@ impl<'a> Parser<'a> {
                 attrs.extend(self.parse_inner_attributes()?);
 
                 if self.eat(&token::CloseDelim(token::Bracket)) {
-                    // Empty vector.
+                    // Empty vector
                     ex = ExprKind::Array(Vec::new());
                 } else {
-                    // Nonempty vector.
+                    // Non-empty vector
                     let first_expr = self.parse_expr()?;
                     if self.eat(&token::Semi) {
-                        // Repeating array syntax: [ 0; 512 ]
+                        // Repeating array syntax: `[ 0; 512 ]`
                         let count = AnonConst {
-                            id: ast::DUMMY_NODE_ID,
+                            id: DUMMY_NODE_ID,
                             value: self.parse_expr()?,
                         };
                         self.expect(&token::CloseDelim(token::Bracket))?;
                         ex = ExprKind::Repeat(first_expr, count);
                     } else if self.eat(&token::Comma) {
-                        // Vector with two or more elements.
+                        // Vector with two or more elements
                         let remaining_exprs = self.parse_seq_to_end(
                             &token::CloseDelim(token::Bracket),
                             SeqSep::trailing_allowed(token::Comma),
@@ -876,7 +875,7 @@ impl<'a> Parser<'a> {
                         exprs.extend(remaining_exprs);
                         ex = ExprKind::Array(exprs);
                     } else {
-                        // Vector with one element.
+                        // Vector with one element
                         self.expect(&token::CloseDelim(token::Bracket))?;
                         ex = ExprKind::Array(vec![first_expr]);
                     }
@@ -892,7 +891,7 @@ impl<'a> Parser<'a> {
                 if self.token.is_path_start() {
                     let path = self.parse_path(PathStyle::Expr)?;
 
-                    // `!`, as an operator, is prefix, so we know this isn't that
+                    // `!`, as an operator, is prefix, so we know this isn't that.
                     if self.eat(&token::Not) {
                         // MACRO INVOCATION expression
                         let (delim, tts) = self.expect_delimited_token_tree()?;
@@ -920,7 +919,7 @@ impl<'a> Parser<'a> {
                     return self.maybe_recover_from_bad_qpath(expr, true);
                 }
                 if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
-                    return self.parse_lambda_expr(attrs);
+                    return self.parse_closure(attrs);
                 }
                 if self.eat_keyword(kw::If) {
                     return self.parse_if_expr(attrs);
@@ -991,13 +990,13 @@ impl<'a> Parser<'a> {
                     return self.parse_try_block(lo, attrs);
                 }
 
-                // Span::rust_2018() is somewhat expensive; don't get it repeatedly.
+                // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
                 let is_span_rust_2018 = self.token.span.rust_2018();
                 if is_span_rust_2018 && self.check_keyword(kw::Async) {
-                    return if self.is_async_block() { // check for `async {` and `async move {`
+                    return if self.is_async_block() { // Check for `async {` and `async move {`.
                         self.parse_async_block(attrs)
                     } else {
-                        self.parse_lambda_expr(attrs)
+                        self.parse_closure(attrs)
                     };
                 }
                 if self.eat_keyword(kw::Return) {
@@ -1043,13 +1042,12 @@ impl<'a> Parser<'a> {
                         // recovery in order to keep the error count down. Fixing the
                         // delimiters will possibly also fix the bare semicolon found in
                         // expression context. For example, silence the following error:
-                        // ```
-                        // error: expected expression, found `;`
-                        //  --> file.rs:2:13
-                        //   |
-                        // 2 |     foo(bar(;
-                        //   |             ^ expected expression
-                        // ```
+                        //
+                        //     error: expected expression, found `;`
+                        //      --> file.rs:2:13
+                        //       |
+                        //     2 |     foo(bar(;
+                        //       |             ^ expected expression
                         self.bump();
                         return Ok(self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()));
                     }
@@ -1096,11 +1094,11 @@ impl<'a> Parser<'a> {
         attrs.extend(self.parse_inner_attributes()?);
 
         let blk = self.parse_block_tail(lo, blk_mode)?;
-        return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
+        Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs))
     }
 
-    /// Parses `move |args| expr`.
-    fn parse_lambda_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+    /// Parses a closure (e.g., `move |args| expr`).
+    fn parse_closure(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
         let movability = if self.eat_keyword(kw::Static) {
@@ -1115,7 +1113,7 @@ impl<'a> Parser<'a> {
             IsAsync::NotAsync
         };
         if asyncness.is_async() {
-            // Feature gate `async ||` closures.
+            // Feature-gate `async ||` closures.
             self.sess.gated_spans.async_closure.borrow_mut().push(self.prev_span);
         }
 
@@ -1128,8 +1126,7 @@ impl<'a> Parser<'a> {
                 self.parse_expr_res(restrictions, None)?
             },
             _ => {
-                // If an explicit return type is given, require a
-                // block to appear (RFC 968).
+                // If an explicit return type is given, require a block to appear (RFC 968).
                 let body_lo = self.token.span;
                 self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())?
             }
@@ -1141,7 +1138,7 @@ impl<'a> Parser<'a> {
             attrs))
     }
 
-    /// Parse an optional `move` prefix to a closure lke construct.
+    /// Parses an optional `move` prefix to a closure-like construct.
     fn parse_capture_clause(&mut self) -> CaptureBy {
         if self.eat_keyword(kw::Move) {
             CaptureBy::Value
@@ -1176,7 +1173,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Parses a parameter in a lambda header (e.g., `|arg, arg|`).
+    /// Parses a parameter in a closure header (e.g., `|arg, arg|`).
     fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
         let lo = self.token.span;
         let attrs = self.parse_param_attributes()?;
@@ -1185,7 +1182,7 @@ impl<'a> Parser<'a> {
             self.parse_ty()?
         } else {
             P(Ty {
-                id: ast::DUMMY_NODE_ID,
+                id: DUMMY_NODE_ID,
                 node: TyKind::Infer,
                 span: self.prev_span,
             })
@@ -1196,7 +1193,7 @@ impl<'a> Parser<'a> {
             ty: t,
             pat,
             span,
-            id: ast::DUMMY_NODE_ID
+            id: DUMMY_NODE_ID
         })
     }
 
@@ -1233,7 +1230,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
     }
 
-    /// Parse the condition of a `if`- or `while`-expression
+    /// Parses the condition of an `if` or `while` expression.
     fn parse_cond_expr(&mut self) -> PResult<'a, P<Expr>> {
         let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
 
@@ -1261,7 +1258,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(span, ExprKind::Let(pat, expr), attrs))
     }
 
-    /// `else` token already eaten
+    /// Parses an `else { ... }` expression (`else` token already eaten).
     fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
         if self.eat_keyword(kw::If) {
             return self.parse_if_expr(ThinVec::new());
@@ -1271,7 +1268,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse a 'for' .. 'in' expression ('for' token already eaten)
+    /// Parses a `for ... in` expression (`for` token already eaten).
     fn parse_for_expr(
         &mut self,
         opt_label: Option<Label>,
@@ -1327,7 +1324,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs))
     }
 
-    /// Parse `loop {...}`, `loop` token already eaten.
+    /// Parses `loop { ... }` (`loop` token already eaten).
     fn parse_loop_expr(
         &mut self,
         opt_label: Option<Label>,
@@ -1350,7 +1347,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    // `match` token already eaten
+    /// Parses a `match ... { ... }` expression (`match` token already eaten).
     fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         let match_span = self.prev_span;
         let lo = self.prev_span;
@@ -1457,7 +1454,7 @@ impl<'a> Parser<'a> {
             guard,
             body: expr,
             span: lo.to(hi),
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
         })
     }
 
@@ -1491,7 +1488,7 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw::Try) &&
         self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
         self.token.span.rust_2018() &&
-        // prevent `while try {} {}`, `if try {} {} else {}`, etc.
+        // Prevent `while try {} {}`, `if try {} {} else {}`, etc.
         !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
 
@@ -1504,7 +1501,7 @@ impl<'a> Parser<'a> {
         attrs.extend(iattrs);
         Ok(self.mk_expr(
             span_lo.to(body.span),
-            ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
+            ExprKind::Async(capture_clause, DUMMY_NODE_ID, body), attrs))
     }
 
     fn is_async_block(&self) -> bool {
@@ -1527,18 +1524,18 @@ impl<'a> Parser<'a> {
     ) -> Option<PResult<'a, P<Expr>>> {
         let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
         let certainly_not_a_block = || self.look_ahead(1, |t| t.is_ident()) && (
-            // `{ ident, ` cannot start a block
+            // `{ ident, ` cannot start a block.
             self.look_ahead(2, |t| t == &token::Comma) ||
             self.look_ahead(2, |t| t == &token::Colon) && (
-                // `{ ident: token, ` cannot start a block
+                // `{ ident: token, ` cannot start a block.
                 self.look_ahead(4, |t| t == &token::Comma) ||
-                // `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`
+                // `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`.
                 self.look_ahead(3, |t| !t.can_begin_type())
             )
         );
 
         if struct_allowed || certainly_not_a_block() {
-            // This is a struct literal, but we don't can't accept them here
+            // This is a struct literal, but we can't accept one here.
             let expr = self.parse_struct_expr(lo, path.clone(), attrs.clone());
             if let (Ok(expr), false) = (&expr, struct_allowed) {
                 self.struct_span_err(
@@ -1606,14 +1603,14 @@ impl<'a> Parser<'a> {
             let mut recovery_field = None;
             if let token::Ident(name, _) = self.token.kind {
                 if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
-                    // Use in case of error after field-looking code: `S { foo: () with a }`
+                    // Use in case of error after field-looking code: `S { foo: () with a }`.
                     recovery_field = Some(ast::Field {
                         ident: Ident::new(name, self.token.span),
                         span: self.token.span,
                         expr: self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()),
                         is_shorthand: false,
                         attrs: ThinVec::new(),
-                        id: ast::DUMMY_NODE_ID,
+                        id: DUMMY_NODE_ID,
                     });
                 }
             }
@@ -1639,7 +1636,7 @@ impl<'a> Parser<'a> {
             match self.expect_one_of(&[token::Comma],
                                      &[token::CloseDelim(token::Brace)]) {
                 Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
-                    // only include the field if there's no parse error for the field name
+                    // Only include the field if there's no parse error for the field name.
                     fields.push(f);
                 }
                 Err(mut e) => {
@@ -1659,7 +1656,7 @@ impl<'a> Parser<'a> {
         return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs));
     }
 
-    /// Parse ident (COLON expr)?
+    /// Parses `ident (COLON expr)?`.
     fn parse_field(&mut self) -> PResult<'a, Field> {
         let attrs = self.parse_outer_attributes()?;
         let lo = self.token.span;
@@ -1699,7 +1696,7 @@ impl<'a> Parser<'a> {
             expr,
             is_shorthand,
             attrs: attrs.into(),
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
         })
     }
 
@@ -1772,6 +1769,6 @@ impl<'a> Parser<'a> {
     }
 
     crate fn mk_expr(&self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
-        P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
+        P(Expr { node, span, attrs, id: DUMMY_NODE_ID })
     }
 }
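
For reference, the expression forms documented in the expr.rs hunks above; an illustrative snippet, not taken from the patch:

    fn main() {
        let grouped = (1);                 // `(e)` is parenthesized `e`
        let single = (1,);                 // `(e,)` is a one-element tuple
        let repeated = [0u8; 512];         // repeating array syntax `[ 0; 512 ]`
        let listed = [1, 2, 3];            // vector with two or more elements
        let upto = ..10;                   // prefix range forms: `..expr`, `..`, `..=expr`
        let add_one = move |x: i32| x + 1; // what `parse_closure` (formerly `parse_lambda_expr`) parses
        println!("{:?} {:?} {} {:?} {:?} {}",
                 grouped, single, repeated.len(), listed, upto, add_one(41));
    }
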
diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs
index 59a3ade9c30..be7fc48fdaf 100644
--- a/src/libsyntax/parse/parser/item.rs
+++ b/src/libsyntax/parse/parser/item.rs
@@ -2,34 +2,36 @@ use super::{Parser, PResult, PathStyle, SemiColonMode, BlockMode};
 
 use crate::maybe_whole;
 use crate::ptr::P;
-use crate::ast::{self, Ident, Attribute, AttrStyle};
-use crate::ast::{Item, ItemKind, ImplItem, TraitItem, TraitItemKind};
-use crate::ast::{UseTree, UseTreeKind, PathSegment};
-use crate::ast::{IsAuto, Constness, IsAsync, Unsafety, Defaultness};
-use crate::ast::{Visibility, VisibilityKind, Mutability, FnDecl, FnHeader};
-use crate::ast::{ForeignItem, ForeignItemKind};
-use crate::ast::{Ty, TyKind, GenericBounds, TraitRef};
-use crate::ast::{EnumDef, VariantData, StructField, AnonConst};
-use crate::ast::{Mac, MacDelimiter};
+use crate::ast::{
+    self, DUMMY_NODE_ID, Ident, Attribute, AttrStyle,
+    Item, ItemKind, ImplItem, TraitItem, TraitItemKind,
+    UseTree, UseTreeKind, PathSegment,
+    IsAuto, Constness, IsAsync, Unsafety, Defaultness,
+    Visibility, VisibilityKind, Mutability, FnDecl, FnHeader,
+    ForeignItem, ForeignItemKind,
+    Ty, TyKind, Generics, GenericBounds, TraitRef,
+    EnumDef, VariantData, StructField, AnonConst,
+    Mac, MacDelimiter,
+};
 use crate::ext::base::DummyResult;
 use crate::parse::token;
 use crate::parse::parser::maybe_append;
-use crate::parse::diagnostics::{Error};
+use crate::parse::diagnostics::Error;
 use crate::tokenstream::{TokenTree, TokenStream};
 use crate::source_map::{respan, Span, Spanned};
 use crate::symbol::{kw, sym};
 
 use std::mem;
 use log::debug;
-use rustc_target::spec::abi::{Abi};
+use rustc_target::spec::abi::Abi;
 use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
 
-/// Whether the type alias or associated type is a concrete type or an opaque type
+/// Whether the type alias or associated type is a concrete type or an opaque type.
 #[derive(Debug)]
 pub enum AliasKind {
-    /// Just a new name for the same type
+    /// Just a new name for the same type.
     Weak(P<Ty>),
-    /// Only trait impls of the type will be usable, not the actual type itself
+    /// Only trait impls of the type will be usable, not the actual type itself.
     OpaqueTy(GenericBounds),
 }
 
@@ -200,7 +202,7 @@ impl<'a> Parser<'a> {
             return Ok(Some(item));
         }
 
-        // Parse `async unsafe? fn`.
+        // Parses `async unsafe? fn`.
         if self.check_keyword(kw::Async) {
             let async_span = self.token.span;
             if self.is_keyword_ahead(1, &[kw::Fn])
@@ -214,8 +216,8 @@ impl<'a> Parser<'a> {
                 let (ident, item_, extra_attrs) =
                     self.parse_item_fn(unsafety,
                                     respan(async_span, IsAsync::Async {
-                                        closure_id: ast::DUMMY_NODE_ID,
-                                        return_impl_trait_id: ast::DUMMY_NODE_ID,
+                                        closure_id: DUMMY_NODE_ID,
+                                        return_impl_trait_id: DUMMY_NODE_ID,
                                     }),
                                     respan(fn_span, Constness::NotConst),
                                     Abi::Rust)?;
@@ -286,7 +288,7 @@ impl<'a> Parser<'a> {
             && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
             // UNSAFE FUNCTION ITEM
             self.bump(); // `unsafe`
-            // `{` is also expected after `unsafe`, in case of error, include it in the diagnostic
+            // `{` is also expected after `unsafe`; in case of error, include it in the diagnostic.
             self.check(&token::OpenDelim(token::Brace));
             let abi = if self.eat_keyword(kw::Extern) {
                 self.parse_opt_abi()?.unwrap_or(Abi::C)
@@ -521,7 +523,7 @@ impl<'a> Parser<'a> {
 
             let mac_lo = self.token.span;
 
-            // item macro.
+            // Item macro.
             let path = self.parse_path(PathStyle::Mod)?;
             self.expect(&token::Not)?;
             let (delim, tts) = self.expect_delimited_token_tree()?;
@@ -659,7 +661,7 @@ impl<'a> Parser<'a> {
         let mut generics = if self.choose_generics_over_qpath() {
             self.parse_generics()?
         } else {
-            ast::Generics::default()
+            Generics::default()
         };
 
         // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
@@ -676,7 +678,7 @@ impl<'a> Parser<'a> {
                           self.look_ahead(1, |t| t != &token::Lt) {
             let span = self.prev_span.between(self.token.span);
             self.struct_span_err(span, "missing trait in a trait impl").emit();
-            P(Ty { node: TyKind::Path(None, err_path(span)), span, id: ast::DUMMY_NODE_ID })
+            P(Ty { node: TyKind::Path(None, err_path(span)), span, id: DUMMY_NODE_ID })
         } else {
             self.parse_ty()?
         };
@@ -798,7 +800,7 @@ impl<'a> Parser<'a> {
             self.expect(&token::Eq)?;
             let expr = self.parse_expr()?;
             self.expect(&token::Semi)?;
-            (name, ast::ImplItemKind::Const(typ, expr), ast::Generics::default())
+            (name, ast::ImplItemKind::Const(typ, expr), Generics::default())
         } else {
             let (name, inner_attrs, generics, node) = self.parse_impl_method(&vis, at_end)?;
             attrs.extend(inner_attrs);
@@ -806,7 +808,7 @@ impl<'a> Parser<'a> {
         };
 
         Ok(ImplItem {
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
             span: lo.to(self.prev_span),
             ident: name,
             vis,
@@ -847,14 +849,13 @@ impl<'a> Parser<'a> {
             !self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe])
     }
 
-    /// Parse a method or a macro invocation in a trait impl.
+    /// Parses a method or a macro invocation in a trait impl.
     fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool)
-                         -> PResult<'a, (Ident, Vec<Attribute>, ast::Generics,
-                             ast::ImplItemKind)> {
-        // code copied from parse_macro_use_or_failure... abstraction!
+                         -> PResult<'a, (Ident, Vec<Attribute>, Generics, ast::ImplItemKind)> {
+        // FIXME: code copied from `parse_macro_use_or_failure` -- use abstraction!
         if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
             // method macro
-            Ok((Ident::invalid(), vec![], ast::Generics::default(),
+            Ok((Ident::invalid(), vec![], Generics::default(),
                 ast::ImplItemKind::Macro(mac)))
         } else {
             let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
@@ -930,7 +931,7 @@ impl<'a> Parser<'a> {
         };
 
         if self.eat(&token::Eq) {
-            // it's a trait alias
+            // It's a trait alias.
             let bounds = self.parse_generic_bounds(None)?;
             tps.where_clause = self.parse_where_clause()?;
             self.expect(&token::Semi)?;
@@ -948,7 +949,7 @@ impl<'a> Parser<'a> {
             }
             Ok((ident, ItemKind::TraitAlias(tps, bounds), None))
         } else {
-            // it's a normal trait
+            // It's a normal trait.
             tps.where_clause = self.parse_where_clause()?;
             self.expect(&token::OpenDelim(token::Brace))?;
             let mut trait_items = vec![];
@@ -1023,10 +1024,10 @@ impl<'a> Parser<'a> {
                 self.expect(&token::Semi)?;
                 None
             };
-            (ident, TraitItemKind::Const(ty, default), ast::Generics::default())
+            (ident, TraitItemKind::Const(ty, default), Generics::default())
         } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
             // trait item macro.
-            (Ident::invalid(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
+            (Ident::invalid(), ast::TraitItemKind::Macro(mac), Generics::default())
         } else {
             let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
 
@@ -1089,7 +1090,7 @@ impl<'a> Parser<'a> {
         };
 
         Ok(TraitItem {
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
             ident: name,
             attrs,
             generics,
@@ -1103,7 +1104,7 @@ impl<'a> Parser<'a> {
     ///
     ///     TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
     fn parse_trait_item_assoc_ty(&mut self)
-        -> PResult<'a, (Ident, TraitItemKind, ast::Generics)> {
+        -> PResult<'a, (Ident, TraitItemKind, Generics)> {
         let ident = self.parse_ident()?;
         let mut generics = self.parse_generics()?;
 
@@ -1165,7 +1166,7 @@ impl<'a> Parser<'a> {
                     UseTreeKind::Nested(self.parse_use_tree_list()?)
                 }
             } else {
-                UseTreeKind::Simple(self.parse_rename()?, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID)
+                UseTreeKind::Simple(self.parse_rename()?, DUMMY_NODE_ID, DUMMY_NODE_ID)
             }
         };
 
@@ -1178,7 +1179,7 @@ impl<'a> Parser<'a> {
     /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
     /// ```
     fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
-        self.parse_delim_comma_seq(token::Brace, |p| Ok((p.parse_use_tree()?, ast::DUMMY_NODE_ID)))
+        self.parse_delim_comma_seq(token::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID)))
             .map(|(r, _)| r)
     }
 
@@ -1240,9 +1241,9 @@ impl<'a> Parser<'a> {
         let mut idents = vec![];
         let mut replacement = vec![];
         let mut fixed_crate_name = false;
-        // Accept `extern crate name-like-this` for better diagnostics
+        // Accept `extern crate name-like-this` for better diagnostics.
         let dash = token::BinOp(token::BinOpToken::Minus);
-        if self.token == dash {  // Do not include `-` as part of the expected tokens list
+        if self.token == dash {  // Do not include `-` as part of the expected tokens list.
             while self.eat(&dash) {
                 fixed_crate_name = true;
                 replacement.push((self.prev_span, "_".to_string()));
@@ -1283,7 +1284,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the name and optional generic types of a function header.
-    fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> {
+    fn parse_fn_header(&mut self) -> PResult<'a, (Ident, Generics)> {
         let id = self.parse_ident()?;
         let generics = self.parse_generics()?;
         Ok((id, generics))
@@ -1379,7 +1380,7 @@ impl<'a> Parser<'a> {
                     ForeignItem {
                         ident: Ident::invalid(),
                         span: lo.to(self.prev_span),
-                        id: ast::DUMMY_NODE_ID,
+                        id: DUMMY_NODE_ID,
                         attrs,
                         vis: visibility,
                         node: ForeignItemKind::Macro(mac),
@@ -1415,7 +1416,7 @@ impl<'a> Parser<'a> {
             ident,
             attrs,
             node: ForeignItemKind::Fn(decl, generics),
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
             span: lo.to(hi),
             vis,
         })
@@ -1435,7 +1436,7 @@ impl<'a> Parser<'a> {
             ident,
             attrs,
             node: ForeignItemKind::Static(ty, mutbl),
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
             span: lo.to(hi),
             vis,
         })
@@ -1453,7 +1454,7 @@ impl<'a> Parser<'a> {
             ident,
             attrs,
             node: ForeignItemKind::Ty,
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
             span: lo.to(hi),
             vis
         })
@@ -1461,7 +1462,7 @@ impl<'a> Parser<'a> {
 
     fn is_static_global(&mut self) -> bool {
         if self.check_keyword(kw::Static) {
-            // Check if this could be a closure
+            // Check if this could be a closure.
             !self.look_ahead(1, |token| {
                 if token.is_keyword(kw::Move) {
                     return true;
@@ -1492,7 +1493,7 @@ impl<'a> Parser<'a> {
 
     /// Parses `type Foo = Bar;` or returns `None`
     /// without modifying the parser state.
-    fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
+    fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, Generics)>> {
         // This parses the grammar:
         //     Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
         if self.eat_keyword(kw::Type) {
@@ -1503,7 +1504,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a type alias or opaque type.
-    fn parse_type_alias(&mut self) -> PResult<'a, (Ident, AliasKind, ast::Generics)> {
+    fn parse_type_alias(&mut self) -> PResult<'a, (Ident, AliasKind, Generics)> {
         let ident = self.parse_ident()?;
         let mut tps = self.parse_generics()?;
         tps.where_clause = self.parse_where_clause()?;
@@ -1536,7 +1537,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the part of an enum declaration following the `{`.
-    fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
+    fn parse_enum_def(&mut self, _generics: &Generics) -> PResult<'a, EnumDef> {
         let mut variants = Vec::new();
         while self.token != token::CloseDelim(token::Brace) {
             let variant_attrs = self.parse_outer_attributes()?;
@@ -1552,15 +1553,15 @@ impl<'a> Parser<'a> {
             } else if self.check(&token::OpenDelim(token::Paren)) {
                 VariantData::Tuple(
                     self.parse_tuple_struct_body()?,
-                    ast::DUMMY_NODE_ID,
+                    DUMMY_NODE_ID,
                 )
             } else {
-                VariantData::Unit(ast::DUMMY_NODE_ID)
+                VariantData::Unit(DUMMY_NODE_ID)
             };
 
             let disr_expr = if self.eat(&token::Eq) {
                 Some(AnonConst {
-                    id: ast::DUMMY_NODE_ID,
+                    id: DUMMY_NODE_ID,
                     value: self.parse_expr()?,
                 })
             } else {
@@ -1569,7 +1570,7 @@ impl<'a> Parser<'a> {
 
             let vr = ast::Variant {
                 ident,
-                id: ast::DUMMY_NODE_ID,
+                id: DUMMY_NODE_ID,
                 attrs: variant_attrs,
                 data: struct_def,
                 disr_expr,
@@ -1622,7 +1623,7 @@ impl<'a> Parser<'a> {
             generics.where_clause = self.parse_where_clause()?;
             if self.eat(&token::Semi) {
                 // If we see a: `struct Foo<T> where T: Copy;` style decl.
-                VariantData::Unit(ast::DUMMY_NODE_ID)
+                VariantData::Unit(DUMMY_NODE_ID)
             } else {
                 // If we see: `struct Foo<T> where T: Copy { ... }`
                 let (fields, recovered) = self.parse_record_struct_body()?;
@@ -1630,14 +1631,14 @@ impl<'a> Parser<'a> {
             }
         // No `where` so: `struct Foo<T>;`
         } else if self.eat(&token::Semi) {
-            VariantData::Unit(ast::DUMMY_NODE_ID)
+            VariantData::Unit(DUMMY_NODE_ID)
         // Record-style struct definition
         } else if self.token == token::OpenDelim(token::Brace) {
             let (fields, recovered) = self.parse_record_struct_body()?;
             VariantData::Struct(fields, recovered)
         // Tuple-style struct definition with optional where-clause.
         } else if self.token == token::OpenDelim(token::Paren) {
-            let body = VariantData::Tuple(self.parse_tuple_struct_body()?, ast::DUMMY_NODE_ID);
+            let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
             generics.where_clause = self.parse_where_clause()?;
             self.expect(&token::Semi)?;
             body
@@ -1726,7 +1727,7 @@ impl<'a> Parser<'a> {
                 span: lo.to(ty.span),
                 vis,
                 ident: None,
-                id: ast::DUMMY_NODE_ID,
+                id: DUMMY_NODE_ID,
                 ty,
                 attrs,
             })
@@ -1817,7 +1818,7 @@ impl<'a> Parser<'a> {
             span: lo.to(self.prev_span),
             ident: Some(name),
             vis,
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
             ty,
             attrs,
         })
@@ -1909,7 +1910,7 @@ impl<'a> Parser<'a> {
         P(Item {
             ident,
             attrs,
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
             node,
             vis,
             span,
diff --git a/src/libsyntax/parse/parser/module.rs b/src/libsyntax/parse/parser/module.rs
index 3f6f87b1c44..2d2fb487d7d 100644
--- a/src/libsyntax/parse/parser/module.rs
+++ b/src/libsyntax/parse/parser/module.rs
@@ -36,12 +36,12 @@ impl<'a> Parser<'a> {
         krate
     }
 
-    /// Parse a `mod <foo> { ... }` or `mod <foo>;` item
+    /// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
     pub(super) fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
         let (in_cfg, outer_attrs) = {
             let mut strip_unconfigured = crate::config::StripUnconfigured {
                 sess: self.sess,
-                features: None, // don't perform gated feature checking
+                features: None, // Don't perform gated feature checking.
             };
             let mut outer_attrs = outer_attrs.to_owned();
             strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
@@ -57,7 +57,7 @@ impl<'a> Parser<'a> {
                     self.submod_path(id, &outer_attrs, id_span)?;
                 let (module, mut attrs) =
                     self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)?;
-                // Record that we fetched the mod from an external file
+                // Record that we fetched the mod from an external file.
                 if warn {
                     let attr = attr::mk_attr_outer(
                         attr::mk_word_item(Ident::with_dummy_span(sym::warn_directory_ownership)));
diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs
index 669f657160b..49f8d58c6a7 100644
--- a/src/libsyntax/parse/parser/pat.rs
+++ b/src/libsyntax/parse/parser/pat.rs
@@ -844,14 +844,14 @@ impl<'a> Parser<'a> {
         // Check if a colon exists one ahead. This means we're parsing a fieldname.
         let hi;
         let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
-            // Parsing a pattern of the form "fieldname: pat"
+            // Parsing a pattern of the form `fieldname: pat`.
             let fieldname = self.parse_field_name()?;
             self.bump();
             let pat = self.parse_pat_with_or_inner()?;
             hi = pat.span;
             (pat, fieldname, false)
         } else {
-            // Parsing a pattern of the form "(box) (ref) (mut) fieldname"
+            // Parsing a pattern of the form `(box) (ref) (mut) fieldname`.
             let is_box = self.eat_keyword(kw::Box);
             let boxed_span = self.token.span;
             let is_ref = self.eat_keyword(kw::Ref);
diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs
index 748c1a168b0..6a3ac2d73f8 100644
--- a/src/libsyntax/parse/parser/stmt.rs
+++ b/src/libsyntax/parse/parser/stmt.rs
@@ -5,12 +5,12 @@ use super::pat::GateOr;
 
 use crate::ptr::P;
 use crate::{maybe_whole, ThinVec};
-use crate::ast::{self, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
+use crate::ast::{self, DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
 use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter};
 use crate::ext::base::DummyResult;
 use crate::parse::{classify, DirectoryOwnership};
 use crate::parse::diagnostics::Error;
-use crate::parse::token::{self};
+use crate::parse::token;
 use crate::source_map::{respan, Span};
 use crate::symbol::{kw, sym};
 
@@ -18,7 +18,7 @@ use std::mem;
 use errors::Applicability;
 
 impl<'a> Parser<'a> {
-    /// Parse a statement. This stops just before trailing semicolons on everything but items.
+    /// Parses a statement. This stops just before trailing semicolons on everything but items.
     /// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
     pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
         Ok(self.parse_stmt_(true))
@@ -43,7 +43,7 @@ impl<'a> Parser<'a> {
 
         Ok(Some(if self.eat_keyword(kw::Let) {
             Stmt {
-                id: ast::DUMMY_NODE_ID,
+                id: DUMMY_NODE_ID,
                 node: StmtKind::Local(self.parse_local(attrs.into())?),
                 span: lo.to(self.prev_span),
             }
@@ -53,7 +53,7 @@ impl<'a> Parser<'a> {
             lo,
         )? {
             Stmt {
-                id: ast::DUMMY_NODE_ID,
+                id: DUMMY_NODE_ID,
                 node: StmtKind::Item(macro_def),
                 span: lo.to(self.prev_span),
             }
@@ -85,7 +85,7 @@ impl<'a> Parser<'a> {
                 })?;
 
                 return Ok(Some(Stmt {
-                    id: ast::DUMMY_NODE_ID,
+                    id: DUMMY_NODE_ID,
                     node: StmtKind::Expr(expr),
                     span: lo.to(self.prev_span),
                 }));
@@ -114,17 +114,17 @@ impl<'a> Parser<'a> {
             // We used to incorrectly stop parsing macro-expanded statements here.
             // If the next token will be an error anyway but could have parsed with the
             // earlier behavior, stop parsing here and emit a warning to avoid breakage.
-            else if macro_legacy_warnings &&
-                    self.token.can_begin_expr() &&
-                    match self.token.kind {
-                // These can continue an expression, so we can't stop parsing and warn.
-                token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
-                token::BinOp(token::Minus) | token::BinOp(token::Star) |
-                token::BinOp(token::And) | token::BinOp(token::Or) |
-                token::AndAnd | token::OrOr |
-                token::DotDot | token::DotDotDot | token::DotDotEq => false,
-                _ => true,
-            } {
+            else if macro_legacy_warnings && self.token.can_begin_expr() &&
+                match self.token.kind {
+                    // These can continue an expression, so we can't stop parsing and warn.
+                    token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
+                    token::BinOp(token::Minus) | token::BinOp(token::Star) |
+                    token::BinOp(token::And) | token::BinOp(token::Or) |
+                    token::AndAnd | token::OrOr |
+                    token::DotDot | token::DotDotDot | token::DotDotEq => false,
+                    _ => true,
+                }
+            {
                 self.warn_missing_semicolon();
                 StmtKind::Mac(P((mac, style, attrs.into())))
             } else {
@@ -135,7 +135,7 @@ impl<'a> Parser<'a> {
                 StmtKind::Expr(e)
             };
             Stmt {
-                id: ast::DUMMY_NODE_ID,
+                id: DUMMY_NODE_ID,
                 span: lo.to(hi),
                 node,
             }
@@ -148,7 +148,7 @@ impl<'a> Parser<'a> {
 
             match item {
                 Some(i) => Stmt {
-                    id: ast::DUMMY_NODE_ID,
+                    id: DUMMY_NODE_ID,
                     span: lo.to(i.span),
                     node: StmtKind::Item(i),
                 },
@@ -178,7 +178,7 @@ impl<'a> Parser<'a> {
                         // an empty tuple that spans the excess semicolons
                         // to preserve this info until the lint stage
                         return Ok(Some(Stmt {
-                            id: ast::DUMMY_NODE_ID,
+                            id: DUMMY_NODE_ID,
                             span: lo.to(last_semi),
                             node: StmtKind::Semi(self.mk_expr(lo.to(last_semi),
                                 ExprKind::Tup(Vec::new()),
@@ -196,7 +196,7 @@ impl<'a> Parser<'a> {
                     let e = self.parse_expr_res(
                         Restrictions::STMT_EXPR, Some(attrs.into()))?;
                     Stmt {
-                        id: ast::DUMMY_NODE_ID,
+                        id: DUMMY_NODE_ID,
                         span: lo.to(e.span),
                         node: StmtKind::Expr(e),
                     }
@@ -218,7 +218,7 @@ impl<'a> Parser<'a> {
             match self.parse_ty() {
                 Ok(ty) => (None, Some(ty)),
                 Err(mut err) => {
-                    // Rewind to before attempting to parse the type and continue parsing
+                    // Rewind to before attempting to parse the type and continue parsing.
                     let parser_snapshot_after_type = self.clone();
                     mem::replace(self, parser_snapshot_before_type);
 
@@ -272,7 +272,7 @@ impl<'a> Parser<'a> {
             ty,
             pat,
             init,
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
             span: lo.to(hi),
             attrs,
         }))
@@ -334,18 +334,18 @@ impl<'a> Parser<'a> {
             //    if (cond)
             //      bar;
             //
-            // Which is valid in other languages, but not Rust.
+            // which is valid in other languages, but not Rust.
             match self.parse_stmt_without_recovery(false) {
                 Ok(Some(stmt)) => {
                     if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
                         || do_not_suggest_help {
-                        // if the next token is an open brace (e.g., `if a b {`), the place-
-                        // inside-a-block suggestion would be more likely wrong than right
+                        // If the next token is an open brace (e.g., `if a b {`), the place-
+                        // inside-a-block suggestion would be more likely wrong than right.
                         e.span_label(sp, "expected `{`");
                         return Err(e);
                     }
                     let mut stmt_span = stmt.span;
-                    // expand the span to include the semicolon, if it exists
+                    // Expand the span to include the semicolon, if it exists.
                     if self.eat(&token::Semi) {
                         stmt_span = stmt_span.with_hi(self.prev_span.hi());
                     }
@@ -354,7 +354,7 @@ impl<'a> Parser<'a> {
                             stmt_span,
                             "try placing this code inside a block",
                             format!("{{ {} }}", snippet),
-                            // speculative, has been misleading in the past (#46836)
+                            // Speculative; has been misleading in the past (#46836).
                             Applicability::MaybeIncorrect,
                         );
                     }
@@ -399,7 +399,7 @@ impl<'a> Parser<'a> {
                     err.emit();
                     self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
                     Some(Stmt {
-                        id: ast::DUMMY_NODE_ID,
+                        id: DUMMY_NODE_ID,
                         node: StmtKind::Expr(DummyResult::raw_expr(self.token.span, true)),
                         span: self.token.span,
                     })
@@ -415,15 +415,15 @@ impl<'a> Parser<'a> {
         }
         Ok(P(ast::Block {
             stmts,
-            id: ast::DUMMY_NODE_ID,
+            id: DUMMY_NODE_ID,
             rules: s,
             span: lo.to(self.prev_span),
         }))
     }
 
     /// Parses a statement, including the trailing semicolon.
-    crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
-        // skip looking for a trailing semicolon when we have an interpolated statement
+    pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
+        // Skip looking for a trailing semicolon when we have an interpolated statement.
         maybe_whole!(self, NtStmt, |x| Some(x));
 
         let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? {
diff --git a/src/libsyntax/parse/tests.rs b/src/libsyntax/parse/tests.rs
index 6a789ef99d6..5cb59b3f827 100644
--- a/src/libsyntax/parse/tests.rs
+++ b/src/libsyntax/parse/tests.rs
@@ -25,12 +25,12 @@ fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
     new_parser_from_source_str(sess, name, source).parse_item()
 }
 
-// produce a syntax_pos::span
+// Produces a `syntax_pos::Span`.
 fn sp(a: u32, b: u32) -> Span {
     Span::with_root_ctxt(BytePos(a), BytePos(b))
 }
 
-/// Parse a string, return an expr
+/// Parses a string, returns an expression.
 fn string_to_expr(source_str : String) -> P<ast::Expr> {
     let ps = ParseSess::new(FilePathMapping::empty());
     with_error_checking_parse(source_str, &ps, |p| {
@@ -38,7 +38,7 @@ fn string_to_expr(source_str : String) -> P<ast::Expr> {
     })
 }
 
-/// Parse a string, return an item
+/// Parses a string, returns an item.
 fn string_to_item(source_str : String) -> Option<P<ast::Item>> {
     let ps = ParseSess::new(FilePathMapping::empty());
     with_error_checking_parse(source_str, &ps, |p| {
@@ -53,7 +53,7 @@ fn string_to_item(source_str : String) -> Option<P<ast::Item>> {
     })
 }
 
-// check the token-tree-ization of macros
+// Checks the token-tree-ization of macros.
 #[test]
 fn string_to_tts_macro () {
     with_default_globals(|| {