author    Alexander Regueiro <alexreg@me.com>  2019-02-08 14:53:55 +0100
committer Alexander Regueiro <alexreg@me.com>  2019-02-10 23:42:32 +0000
commit    c3e182cf43aea2c010a1915eb37293a458df2228 (patch)
tree      225aa2dfceff56d10c0b31f6966fbf7ec5da8180 /src/libsyntax
parent    0b7af2668a80fb2fa720a06ca44aff4dd1e9de38 (diff)
rustc: doc comments
Diffstat (limited to 'src/libsyntax')
 src/libsyntax/ast.rs                   |  29
 src/libsyntax/attr/builtin.rs          |   6
 src/libsyntax/attr/mod.rs              |   6
 src/libsyntax/config.rs                |   8
 src/libsyntax/diagnostics/metadata.rs  |   4
 src/libsyntax/ext/base.rs              |  34
 src/libsyntax/ext/build.rs             |   2
 src/libsyntax/ext/expand.rs            |   2
 src/libsyntax/ext/tt/macro_parser.rs   |  18
 src/libsyntax/ext/tt/macro_rules.rs    |   8
 src/libsyntax/ext/tt/quoted.rs         |  18
 src/libsyntax/feature_gate.rs          |   7
 src/libsyntax/json.rs                  |   2
 src/libsyntax/parse/lexer/comments.rs  |   6
 src/libsyntax/parse/lexer/mod.rs       |  12
 src/libsyntax/parse/mod.rs             |  43
 src/libsyntax/parse/parser.rs          | 319
 src/libsyntax/parse/token.rs           |  20
 src/libsyntax/print/pp.rs              |   4
 src/libsyntax/ptr.rs                   |   2
 src/libsyntax/source_map.rs            |  18
 src/libsyntax/tokenstream.rs           |   5
 src/libsyntax/util/lev_distance.rs     |   4
 src/libsyntax/util/parser.rs           |   4
 src/libsyntax/util/parser_testing.rs   |   2
 src/libsyntax/visit.rs                 |   6
26 files changed, 300 insertions(+), 289 deletions(-)
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 681d8eeaa0d..ab62dd2bc9b 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -129,14 +129,14 @@ impl PathSegment {
     }
 }
 
-/// Arguments of a path segment.
+/// The arguments of a path segment.
 ///
 /// E.g., `<A, B>` as in `Foo<A, B>` or `(A, B)` as in `Foo(A, B)`.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum GenericArgs {
-    /// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
+    /// The `<'a, A, B, C>` in `foo::bar::baz::<'a, A, B, C>`.
     AngleBracketed(AngleBracketedArgs),
-    /// The `(A,B)` and `C` in `Foo(A,B) -> C`
+    /// The `(A, B)` and `C` in `Foo(A, B) -> C`.
     Parenthesized(ParenthesizedArgs),
 }
 
@@ -180,16 +180,15 @@ impl GenericArg {
     }
 }
 
-/// A path like `Foo<'a, T>`
+/// A path like `Foo<'a, T>`.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Default)]
 pub struct AngleBracketedArgs {
-    /// Overall span
+    /// The overall span.
     pub span: Span,
     /// The arguments for this path segment.
     pub args: Vec<GenericArg>,
     /// Bindings (equality constraints) on associated types, if present.
-    ///
-    /// E.g., `Foo<A=Bar>`.
+    /// E.g., `Foo<A = Bar>`.
     pub bindings: Vec<TypeBinding>,
 }
 
@@ -205,7 +204,7 @@ impl Into<Option<P<GenericArgs>>> for ParenthesizedArgs {
     }
 }
 
-/// A path like `Foo(A,B) -> C`
+/// A path like `Foo(A, B) -> C`.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct ParenthesizedArgs {
     /// Overall span
@@ -270,7 +269,7 @@ impl serialize::UseSpecializedDecodable for NodeId {
     }
 }
 
-/// Node id used to represent the root of the crate.
+/// `NodeId` used to represent the root of the crate.
 pub const CRATE_NODE_ID: NodeId = NodeId::from_u32_const(0);
 
 /// When parsing and doing expansions, we initially give all AST nodes this AST
@@ -369,7 +368,7 @@ impl Default for Generics {
     }
 }
 
-/// A `where` clause in a definition
+/// A where-clause in a definition.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct WhereClause {
     pub id: NodeId,
@@ -377,7 +376,7 @@ pub struct WhereClause {
     pub span: Span,
 }
 
-/// A single predicate in a `where` clause
+/// A single predicate in a where-clause.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum WherePredicate {
     /// A type binding (e.g., `for<'c> Foo: Send + Clone + 'c`).
@@ -1592,7 +1591,7 @@ pub struct BareFnTy {
     pub decl: P<FnDecl>,
 }
 
-/// The different kinds of types recognized by the compiler.
+/// The various kinds of type recognized by the compiler.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum TyKind {
     /// A variable-length slice (`[T]`).
@@ -1894,7 +1893,7 @@ impl fmt::Debug for ImplPolarity {
 
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum FunctionRetTy {
-    /// Return type is not specified.
+    /// The return type is not specified.
     ///
     /// Functions default to `()` and closures default to inference.
     /// Span points to where return type would be inserted.
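
The two defaults mentioned in that doc comment, as a quick illustration in ordinary user code (not compiler internals):

fn unit_by_default() {}              // omitted return type defaults to `()`

fn main() {
    let succ = |x: i32| x + 1;       // closure return type is inferred
    assert_eq!(succ(1), 2);
    unit_by_default();
}
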
@@ -2036,10 +2035,10 @@ pub struct Attribute {
 
 /// `TraitRef`s appear in impls.
 ///
-/// Resolve maps each `TraitRef`'s `ref_id` to its defining trait; that's all
+/// Resolution maps each `TraitRef`'s `ref_id` to its defining trait; that's all
 /// that the `ref_id` is for. The `impl_id` maps to the "self type" of this impl.
 /// If this impl is an `ItemKind::Impl`, the `impl_id` is redundant (it could be the
-/// same as the impl's node-id).
+/// same as the impl's `NodeId`).
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct TraitRef {
     pub path: Path,
diff --git a/src/libsyntax/attr/builtin.rs b/src/libsyntax/attr/builtin.rs
index 6f7761b54fc..520984b8091 100644
--- a/src/libsyntax/attr/builtin.rs
+++ b/src/libsyntax/attr/builtin.rs
@@ -163,7 +163,7 @@ pub struct RustcDeprecation {
     pub suggestion: Option<Symbol>,
 }
 
-/// Check if `attrs` contains an attribute like `#![feature(feature_name)]`.
+/// Checks if `attrs` contains an attribute like `#![feature(feature_name)]`.
 /// This will not perform any "sanity checks" on the form of the attributes.
 pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool {
     attrs.iter().any(|item| {
@@ -177,7 +177,7 @@ pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool {
     })
 }
 
-/// Find the first stability attribute. `None` if none exists.
+/// Finds the first stability attribute. `None` if none exists.
 pub fn find_stability(sess: &ParseSess, attrs: &[Attribute],
                       item_sp: Span) -> Option<Stability> {
     find_stability_generic(sess, attrs.iter(), item_sp)
@@ -580,7 +580,7 @@ pub struct Deprecation {
     pub note: Option<Symbol>,
 }
 
-/// Find the deprecation attribute. `None` if none exists.
+/// Finds the deprecation attribute. `None` if none exists.
 pub fn find_deprecation(sess: &ParseSess, attrs: &[Attribute],
                         item_sp: Span) -> Option<Deprecation> {
     find_deprecation_generic(sess, attrs.iter(), item_sp)
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index 0c3aedae715..a4f5449ec54 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -85,7 +85,7 @@ impl NestedMetaItem {
         self.span
     }
 
-    /// Returns true if this list item is a MetaItem with a name of `name`.
+    /// Returns `true` if this list item is a `MetaItem` with a name of `name`.
     pub fn check_name(&self, name: &str) -> bool {
         self.meta_item().map_or(false, |meta_item| meta_item.check_name(name))
     }
@@ -272,7 +272,7 @@ impl MetaItem {
 }
 
 impl Attribute {
-    /// Extract the MetaItem from inside this Attribute.
+    /// Extracts the `MetaItem` from inside this `Attribute`.
     pub fn meta(&self) -> Option<MetaItem> {
         let mut tokens = self.tokens.trees().peekable();
         Some(MetaItem {
@@ -328,7 +328,7 @@ impl Attribute {
         })
     }
 
-    /// Convert self to a normal #[doc="foo"] comment, if it is a
+    /// Converts `self` to a normal `#[doc = "foo"]` comment, if it is a
     /// comment like `///` or `/** */`. (Returns self unchanged for
     /// non-sugared doc attributes.)
     pub fn with_desugared_doc<T, F>(&self, f: F) -> T where
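
The sugaring this method undoes, shown in plain code: a `///` comment is equivalent to an explicit `#[doc]` attribute (the text after `///`, including the leading space, becomes the string value).

/// Adds two numbers.
fn add_sugared(a: i32, b: i32) -> i32 { a + b }

#[doc = " Adds two numbers."]
fn add_desugared(a: i32, b: i32) -> i32 { a + b }

fn main() {
    assert_eq!(add_sugared(1, 2), add_desugared(1, 2));
}
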
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs
index bfc4457f054..5bab9e4e2c9 100644
--- a/src/libsyntax/config.rs
+++ b/src/libsyntax/config.rs
@@ -88,7 +88,7 @@ impl<'a> StripUnconfigured<'a> {
     ///
     /// Gives a compiler warning when the `cfg_attr` contains no attributes and
     /// is in the original source file. Gives a compiler error if the syntax of
-    /// the attribute is incorrect
+    /// the attribute is incorrect.
     fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Vec<ast::Attribute> {
         if !attr.check_name("cfg_attr") {
             return vec![attr];
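
For reference, the shape of the attribute being processed here, from the user's side: `cfg_attr` expands to its trailing attributes when the predicate holds, and to nothing otherwise.

// Under `cargo test` this struct gains `#[derive(Debug)]`; in a normal
// build the `cfg_attr` evaporates.
#[cfg_attr(test, derive(Debug))]
struct Config {
    verbose: bool,
}

fn main() {
    let c = Config { verbose: true };
    assert!(c.verbose);
}
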
@@ -146,7 +146,7 @@ impl<'a> StripUnconfigured<'a> {
         }
     }
 
-    /// Determine if a node with the given attributes should be included in this configuration.
+    /// Determines if a node with the given attributes should be included in this configuration.
     pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool {
         attrs.iter().all(|attr| {
             if !is_cfg(attr) {
@@ -282,8 +282,8 @@ impl<'a> StripUnconfigured<'a> {
         }
     }
 
-    // deny #[cfg] on generic parameters until we decide what to do with it.
-    // see issue #51279.
+    /// Denies `#[cfg]` on generic parameters until we decide what to do with it.
+    /// See issue #51279.
     pub fn disallow_cfg_on_generic_param(&mut self, param: &ast::GenericParam) {
         for attr in param.attrs() {
             let offending_attr = if attr.check_name("cfg") {
diff --git a/src/libsyntax/diagnostics/metadata.rs b/src/libsyntax/diagnostics/metadata.rs
index 3abb820a678..704135fe1d5 100644
--- a/src/libsyntax/diagnostics/metadata.rs
+++ b/src/libsyntax/diagnostics/metadata.rs
@@ -34,7 +34,7 @@ pub struct ErrorLocation {
 }
 
 impl ErrorLocation {
-    /// Create an error location from a span.
+    /// Creates an error location from a span.
     pub fn from_span(ecx: &ExtCtxt<'_>, sp: Span) -> ErrorLocation {
         let loc = ecx.source_map().lookup_char_pos_adj(sp.lo());
         ErrorLocation {
@@ -44,7 +44,7 @@ impl ErrorLocation {
     }
 }
 
-/// Get the directory where metadata for a given `prefix` should be stored.
+/// Gets the directory where metadata for a given `prefix` should be stored.
 ///
 /// See `output_metadata`.
 pub fn get_metadata_dir(prefix: &str) -> PathBuf {
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 465b53184dc..fcb349205e3 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -327,34 +327,34 @@ macro_rules! make_stmts_default {
 /// The result of a macro expansion. The return values of the various
 /// methods are spliced into the AST at the callsite of the macro.
 pub trait MacResult {
-    /// Create an expression.
+    /// Creates an expression.
     fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
         None
     }
-    /// Create zero or more items.
+    /// Creates zero or more items.
     fn make_items(self: Box<Self>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
         None
     }
 
-    /// Create zero or more impl items.
+    /// Creates zero or more impl items.
     fn make_impl_items(self: Box<Self>) -> Option<SmallVec<[ast::ImplItem; 1]>> {
         None
     }
 
-    /// Create zero or more trait items.
+    /// Creates zero or more trait items.
     fn make_trait_items(self: Box<Self>) -> Option<SmallVec<[ast::TraitItem; 1]>> {
         None
     }
 
-    /// Create zero or more items in an `extern {}` block
+    /// Creates zero or more items in an `extern {}` block.
     fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> { None }
 
-    /// Create a pattern.
+    /// Creates a pattern.
     fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
         None
     }
 
-    /// Create zero or more statements.
+    /// Creates zero or more statements.
     ///
     /// By default this attempts to create an expression statement,
     /// returning None if that fails.
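
The design of `MacResult` is worth a sketch: every method has a default body returning `None`, so an expansion result only overrides the fragment kinds it can actually produce. A minimal, self-contained imitation of the pattern (toy `Expr`/`Item` types, not the libsyntax ones):

struct Expr(String);
struct Item(String);

trait MacResult {
    fn make_expr(self: Box<Self>) -> Option<Expr> { None }
    fn make_items(self: Box<Self>) -> Option<Vec<Item>> { None }
}

// An expansion result that can only be an expression; `make_items`
// keeps its `None` default.
struct ExprOnly(Expr);

impl MacResult for ExprOnly {
    fn make_expr(self: Box<Self>) -> Option<Expr> { Some(self.0) }
}

fn main() {
    let res: Box<dyn MacResult> = Box::new(ExprOnly(Expr("1 + 1".into())));
    assert!(res.make_expr().is_some());
}
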
@@ -461,7 +461,7 @@ pub struct DummyResult {
 }
 
 impl DummyResult {
-    /// Create a default MacResult that can be anything.
+    /// Creates a default `MacResult` that can be anything.
     ///
     /// Use this as a return value after hitting any errors and
     /// calling `span_err`.
@@ -474,7 +474,7 @@ impl DummyResult {
         Box::new(DummyResult { expr_only: false, is_error: false, span })
     }
 
-    /// Create a default MacResult that can only be an expression.
+    /// Creates a default `MacResult` that can only be an expression.
     ///
     /// Use this for macros that must expand to an expression, so even
     /// if an error is encountered internally, the user will receive
@@ -677,7 +677,7 @@ pub enum SyntaxExtension {
 }
 
 impl SyntaxExtension {
-    /// Return which kind of macro calls this syntax extension.
+    /// Returns which kind of macro calls this syntax extension.
     pub fn kind(&self) -> MacroKind {
         match *self {
             SyntaxExtension::DeclMacro { .. } |
@@ -835,8 +835,8 @@ impl<'a> ExtCtxt<'a> {
         expand::MacroExpander::new(self, false)
     }
 
-    /// Returns a `Folder` that deeply expands all macros and assigns all node ids in an AST node.
-    /// Once node ids are assigned, the node may not be expanded, removed, or otherwise modified.
+    /// Returns a `Folder` that deeply expands all macros and assigns all `NodeId`s in an AST node.
+    /// Once `NodeId`s are assigned, the node may not be expanded, removed, or otherwise modified.
     pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
         expand::MacroExpander::new(self, true)
     }
@@ -976,9 +976,9 @@ impl<'a> ExtCtxt<'a> {
     }
 }
 
-/// Extract a string literal from the macro expanded version of `expr`,
+/// Extracts a string literal from the macro-expanded version of `expr`,
 /// emitting `err_msg` if `expr` is not a string literal. This does not stop
-/// compilation on error, merely emits a non-fatal error and returns None.
+/// compilation on error, merely emits a non-fatal error and returns `None`.
 pub fn expr_to_spanned_string<'a>(
     cx: &'a mut ExtCtxt<'_>,
     mut expr: P<ast::Expr>,
@@ -1022,7 +1022,7 @@ pub fn check_zero_tts(cx: &ExtCtxt<'_>,
 }
 
 /// Interpreting `tts` as a comma-separated sequence of expressions,
-/// expect exactly one string literal, or emit an error and return None.
+/// expect exactly one string literal, or emit an error and return `None`.
 pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
                                sp: Span,
                                tts: &[tokenstream::TokenTree],
@@ -1044,8 +1044,8 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
     })
 }
 
-/// Extract comma-separated expressions from `tts`. If there is a
-/// parsing error, emit a non-fatal error and return None.
+/// Extracts comma-separated expressions from `tts`. If there is a
+/// parsing error, emit a non-fatal error and return `None`.
 pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
                           sp: Span,
                           tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 6708e3c12a0..48f6e4c0c82 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -347,7 +347,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
 
     /// Constructs a qualified path.
     ///
-    /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A=Bar>`.
+    /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A = Bar>`.
     fn qpath_all(&self,
                  self_type: P<ast::Ty>,
                  trait_path: ast::Path,
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 89d59478a5d..3b97242daa1 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -444,7 +444,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         }
     }
 
-    /// Collect all macro invocations reachable at this time in this AST fragment, and replace
+    /// Collects all macro invocations reachable at this time in this AST fragment, and replaces
     /// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s.
     /// Then call into resolver that builds a skeleton ("reduced graph") of the fragment and
     /// prepares data for resolving paths of macro invocations.
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index a9000b89fb4..d4ea3b81a60 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -1,4 +1,4 @@
-//! This is an NFA-based parser, which calls out to the main rust parser for named nonterminals
+//! This is an NFA-based parser, which calls out to the main Rust parser for named non-terminals
 //! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads
 //! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in
 //! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier
@@ -22,7 +22,7 @@
 //!
 //! As it processes them, it fills up `eof_items` with threads that would be valid if
 //! the macro invocation is now over, `bb_items` with threads that are waiting on
-//! a Rust nonterminal like `$e:expr`, and `next_items` with threads that are waiting
+//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
 //! on a particular token. Most of the logic concerns moving the · through the
 //! repetitions indicated by Kleene stars. The rules for moving the · without
 //! consuming any input are called epsilon transitions. It only advances or calls
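
A tiny macro exercises everything the module docs above name: `$e:expr` is a non-terminal handed off to the black-box Rust parser, the `$(...),*` repetition is a Kleene star the NFA threads step through, and the empty invocation takes an epsilon path:

macro_rules! sum {
    ($($e:expr),*) => { 0 $(+ $e)* };
}

fn main() {
    assert_eq!(sum!(1, 2, 3), 6);
    assert_eq!(sum!(), 0);
}
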
@@ -216,7 +216,7 @@ struct MatcherPos<'root, 'tt: 'root> {
 }
 
 impl<'root, 'tt> MatcherPos<'root, 'tt> {
-    /// Add `m` as a named match for the `idx`-th metavar.
+    /// Adds `m` as a named match for the `idx`-th metavar.
     fn push_match(&mut self, idx: usize, m: NamedMatch) {
         let matches = Rc::make_mut(&mut self.matches[idx]);
         matches.push(m);
@@ -304,7 +304,7 @@ fn create_matches(len: usize) -> Box<[Rc<NamedMatchVec>]> {
     }.into_boxed_slice()
 }
 
-/// Generate the top-level matcher position in which the "dot" is before the first token of the
+/// Generates the top-level matcher position in which the "dot" is before the first token of the
 /// matcher `ms` and we are going to start matching at the span `open` in the source.
 fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherPos<'root, 'tt> {
     let match_idx_hi = count_names(ms);
@@ -337,7 +337,7 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP
 
 /// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
 /// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type
+/// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type
 /// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
 /// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
 ///
@@ -414,7 +414,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
     Success(ret_val)
 }
 
-/// Generate an appropriate parsing failure message. For EOF, this is "unexpected end...". For
+/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
 /// other tokens, this is "unexpected token...".
 pub fn parse_failure_msg(tok: Token) -> String {
     match tok {
@@ -426,7 +426,7 @@ pub fn parse_failure_msg(tok: Token) -> String {
     }
 }
 
-/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
+/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison).
 fn token_name_eq(t1: &Token, t2: &Token) -> bool {
     if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
         id1.name == id2.name && is_raw1 == is_raw2
@@ -880,7 +880,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
     }
 }
 
-/// A call to the "black-box" parser to parse some rust nonterminal.
+/// A call to the "black-box" parser to parse some Rust non-terminal.
 ///
 /// # Parameters
 ///
@@ -891,7 +891,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
 ///
 /// # Returns
 ///
-/// The parsed nonterminal.
+/// The parsed non-terminal.
 fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
     if name == "tt" {
         return token::NtTT(p.parse_token_tree());
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 33ea675f9d1..897113ba885 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -432,7 +432,7 @@ fn check_lhs_nt_follows(sess: &ParseSess,
     // after parsing/expansion. we can report every error in every macro this way.
 }
 
-/// Check that the lhs contains no repetition which could match an empty token
+/// Checks that the lhs contains no repetition which could match an empty token
 /// tree, because then the matcher would hang indefinitely.
 fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
     use quoted::TokenTree;
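
The kind of macro this check rejects, for concreteness. The bad definition is left commented out so the snippet still compiles; rustc refuses it with a "repetition matches empty token tree" error, since the matcher could loop forever without consuming input:

// macro_rules! bad {
//     ($()*) => {};
// }

fn main() {}
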
@@ -960,8 +960,8 @@ fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
     }
 }
 
-/// True if a fragment of type `frag` can be followed by any sort of
-/// token.  We use this (among other things) as a useful approximation
+/// Returns `true` if a fragment of type `frag` can be followed by any sort of
+/// token. We use this (among other things) as a useful approximation
 /// for when `frag` can be followed by a repetition like `$(...)*` or
 /// `$(...)+`. In general, these can be a bit tricky to reason about,
 /// so we adopt a conservative position that says that any fragment
@@ -990,7 +990,7 @@ enum IsInFollow {
     Invalid(String, &'static str),
 }
 
-/// True if `frag` can legally be followed by the token `tok`. For
+/// Returns `true` if `frag` can legally be followed by the token `tok`. For
 /// fragments that can consume an unbounded number of tokens, `tok`
 /// must be within a well-defined follow set. This is intended to
 /// guarantee future compatibility: for example, without this rule, if
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 6c3cf3e6312..255795f28c7 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -22,17 +22,17 @@ pub struct Delimited {
 }
 
 impl Delimited {
-    /// Return the opening delimiter (possibly `NoDelim`).
+    /// Returns the opening delimiter (possibly `NoDelim`).
     pub fn open_token(&self) -> token::Token {
         token::OpenDelim(self.delim)
     }
 
-    /// Return the closing delimiter (possibly `NoDelim`).
+    /// Returns the closing delimiter (possibly `NoDelim`).
     pub fn close_token(&self) -> token::Token {
         token::CloseDelim(self.delim)
     }
 
-    /// Return a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
     pub fn open_tt(&self, span: Span) -> TokenTree {
         let open_span = if span.is_dummy() {
             span
@@ -42,7 +42,7 @@ impl Delimited {
         TokenTree::Token(open_span, self.open_token())
     }
 
-    /// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
     pub fn close_tt(&self, span: Span) -> TokenTree {
         let close_span = if span.is_dummy() {
             span
@@ -107,7 +107,7 @@ impl TokenTree {
         }
     }
 
-    /// Returns true if the given token tree contains no other tokens. This is vacuously true for
+    /// Returns `true` if the given token tree contains no other tokens. This is vacuously true for
     /// single tokens or metavar/decls, but may be false for delimited trees or sequences.
     pub fn is_empty(&self) -> bool {
         match *self {
@@ -120,7 +120,7 @@ impl TokenTree {
         }
     }
 
-    /// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
+    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
             (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
@@ -140,7 +140,7 @@ impl TokenTree {
         }
     }
 
-    /// Retrieve the `TokenTree`'s span.
+    /// Retrieves the `TokenTree`'s span.
     pub fn span(&self) -> Span {
         match *self {
             TokenTree::Token(sp, _)
@@ -411,8 +411,8 @@ where
 /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
 /// error with the appropriate span is emitted to `sess` and a dummy value is returned.
 ///
-/// NOTE: In 2015 edition, * and + are the only Kleene operators and `?` is a separator. In 2018,
-/// `?` is a Kleene op and not a separator.
+/// N.B., in the 2015 edition, `*` and `+` are the only Kleene operators, and `?` is a separator.
+/// In the 2018 edition, however, `?` is a Kleene operator and not a separator.
 fn parse_sep_and_kleene_op<I>(
     input: &mut Peekable<I>,
     span: Span,
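
What the 2018-edition behavior looks like from the user's side: `$(...)?` matches zero or one repetitions.

// Accepts either no argument or exactly one (2018 edition).
macro_rules! opt_or_zero {
    ($($x:expr)?) => { 0 $(+ $x)? };
}

fn main() {
    assert_eq!(opt_or_zero!(), 0);
    assert_eq!(opt_or_zero!(5), 5);
}
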
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index 0853b4399d2..826149267e9 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -737,7 +737,7 @@ pub struct AttributeTemplate {
 }
 
 impl AttributeTemplate {
-    /// Check that the given meta-item is compatible with this template.
+    /// Checks that the given meta-item is compatible with this template.
     fn compatible(&self, meta_item_kind: &ast::MetaItemKind) -> bool {
         match meta_item_kind {
             ast::MetaItemKind::Word => self.word,
@@ -749,7 +749,7 @@ impl AttributeTemplate {
 }
 
 /// A convenience macro for constructing attribute templates.
-/// E.g. `template!(Word, List: "description")` means that the attribute
+/// E.g., `template!(Word, List: "description")` means that the attribute
 /// supports forms `#[attr]` and `#[attr(description)]`.
 macro_rules! template {
     (Word) => { template!(@ true, None, None) };
@@ -2145,8 +2145,7 @@ pub fn check_crate(krate: &ast::Crate,
 
 #[derive(Clone, Copy, Hash)]
 pub enum UnstableFeatures {
-    /// Hard errors for unstable features are active, as on
-    /// beta/stable channels.
+    /// Hard errors for unstable features are active, as on beta/stable channels.
     Disallow,
     /// Allow features to be activated, as on nightly.
     Allow,
diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs
index 2953b35298e..af785050532 100644
--- a/src/libsyntax/json.rs
+++ b/src/libsyntax/json.rs
@@ -342,7 +342,7 @@ impl DiagnosticSpanLine {
         }
     }
 
-    /// Create a list of DiagnosticSpanLines from span - each line with any part
+    /// Creates a list of `DiagnosticSpanLine`s from a span: each line with any part
     /// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
     /// `span` within the line.
     fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 4632d814d5c..74fff3324ea 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -197,9 +197,9 @@ fn read_line_comments(rdr: &mut StringReader<'_>,
     }
 }
 
-/// Returns None if the first col chars of s contain a non-whitespace char.
-/// Otherwise returns Some(k) where k is first char offset after that leading
-/// whitespace.  Note k may be outside bounds of s.
+/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
+/// Otherwise returns `Some(k)` where `k` is first char offset after that leading
+/// whitespace. Note that `k` may be outside bounds of `s`.
 fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
     let mut idx = 0;
     for (i, ch) in s.char_indices().take(col.to_usize()) {
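
A self-contained rendering of that contract, using plain `usize` in place of the lexer's `CharPos`:

fn all_whitespace(s: &str, col: usize) -> Option<usize> {
    let mut idx = 0;
    for (i, ch) in s.char_indices().take(col) {
        if !ch.is_whitespace() {
            return None;
        }
        idx = i + ch.len_utf8();
    }
    Some(idx)
}

fn main() {
    assert_eq!(all_whitespace("    * doc", 4), Some(4)); // 4 leading spaces
    assert_eq!(all_whitespace("  * doc", 4), None);      // `*` within first 4 chars
    assert_eq!(all_whitespace("  ", 4), Some(2));        // shorter than `col` is fine
}
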
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index d3fc1c03634..9168d4b61c1 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -112,7 +112,7 @@ impl<'a> StringReader<'a> {
         self.unwrap_or_abort(res)
     }
 
-    /// Return the next token. EFFECT: advances the string_reader.
+    /// Returns the next token, advancing the `StringReader` in the process.
     pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
         assert!(self.fatal_errs.is_empty());
         let ret_val = TokenAndSpan {
@@ -425,7 +425,7 @@ impl<'a> StringReader<'a> {
         self.with_str_from_to(start, self.pos, f)
     }
 
-    /// Create a Name from a given offset to the current offset, each
+    /// Creates a `Name` from a given offset to the current offset, each
     /// adjusted 1 towards each other (assumes that on either side there is a
     /// single-byte delimiter).
     fn name_from(&self, start: BytePos) -> ast::Name {
@@ -670,7 +670,7 @@ impl<'a> StringReader<'a> {
     }
 
     /// If there is whitespace, shebang, or a comment, scan it. Otherwise,
-    /// return None.
+    /// return `None`.
     fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
         match self.ch.unwrap_or('\0') {
             // # to handle shebang at start of file -- this is the entry point
@@ -920,7 +920,7 @@ impl<'a> StringReader<'a> {
     /// in a byte, (non-raw) byte string, char, or (non-raw) string literal.
     /// `start` is the position of `first_source_char`, which is already consumed.
     ///
-    /// Returns true if there was a valid char/byte, false otherwise.
+    /// Returns `true` if there was a valid char/byte.
     fn scan_char_or_byte(&mut self,
                          start: BytePos,
                          first_source_char: char,
@@ -1152,7 +1152,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    /// Check that a base is valid for a floating literal, emitting a nice
+    /// Checks that a base is valid for a floating literal, emitting a nice
     /// error if it isn't.
     fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
         match base {
@@ -1185,7 +1185,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    /// Return the next token from the string, advances the input past that
+    /// Returns the next token from the string, advances the input past that
     /// token, and updates the interner
     fn next_token_inner(&mut self) -> Result<token::Token, ()> {
         let c = self.ch;
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 317d6933207..69940ae621c 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -1,4 +1,4 @@
-//! The main parser interface
+//! The main parser interface.
 
 use crate::ast::{self, CrateConfig, NodeId};
 use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
@@ -38,12 +38,11 @@ pub struct ParseSess {
     pub unstable_features: UnstableFeatures,
     pub config: CrateConfig,
     pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
-    /// Places where raw identifiers were used. This is used for feature gating
-    /// raw identifiers
+    /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
     pub raw_identifier_spans: Lock<Vec<Span>>,
-    /// The registered diagnostics codes
+    /// The registered diagnostic codes.
     crate registered_diagnostics: Lock<ErrorMap>,
-    /// Used to determine and report recursive mod inclusions
+    /// Used to determine and report recursive module inclusions.
     included_mod_stack: Lock<Vec<PathBuf>>,
     source_map: Lrc<SourceMap>,
     pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
@@ -146,12 +145,12 @@ pub fn parse_stream_from_source_str(
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
-/// Create a new parser from a source string
+/// Creates a new parser from a source string.
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
 }
 
-/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
 /// token stream.
 pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
     -> Result<Parser<'_>, Vec<Diagnostic>>
@@ -162,13 +161,13 @@ pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source
     Ok(parser)
 }
 
-/// Create a new parser, handling errors as appropriate
+/// Creates a new parser, handling errors as appropriate
 /// if the file doesn't exist
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
     source_file_to_parser(sess, file_to_source_file(sess, path, None))
 }
 
-/// Create a new parser, returning buffered diagnostics if the file doesn't
+/// Creates a new parser, returning buffered diagnostics if the file doesn't
 /// exist or from lexing the initial token stream.
 pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path)
     -> Result<Parser<'a>, Vec<Diagnostic>> {
@@ -239,7 +238,7 @@ fn try_file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a session and a path and an optional span (for error reporting),
-/// add the path to the session's source_map and return the new source_file.
+/// add the path to the session's `source_map` and return the new `source_file`.
 fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                    -> Lrc<SourceFile> {
     match try_file_to_source_file(sess, path, spanopt) {
@@ -251,7 +250,7 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     }
 }
 
-/// Given a source_file, produce a sequence of token-trees
+/// Given a `source_file`, produces a sequence of token trees.
 pub fn source_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
@@ -260,7 +259,7 @@ pub fn source_file_to_stream(
     panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
 }
 
-/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
 /// parsing the token stream.
 pub fn maybe_file_to_stream(
     sess: &ParseSess,
@@ -295,12 +294,12 @@ pub fn maybe_file_to_stream(
     }
 }
 
-/// Given stream and the `ParseSess`, produce a parser
+/// Given a stream and the `ParseSess`, produces a parser.
 pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
     Parser::new(sess, stream, None, true, false)
 }
 
-/// Parse a string representing a character literal into its final form.
+/// Parses a string representing a character literal into its final form.
 /// Rather than just accepting/rejecting a given literal, unescapes it as
 /// well. Can take any slice prefixed by a character escape. Returns the
 /// character and the number of characters consumed.
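
A stripped-down sketch of that contract, handling only a few simple escapes (the real function also covers `\x..` and `\u{..}` forms and emits diagnostics instead of panicking):

fn char_lit(lit: &str) -> (char, isize) {
    let mut chars = lit.chars();
    match chars.next() {
        Some('\\') => match chars.next() {
            Some('n') => ('\n', 2),
            Some('t') => ('\t', 2),
            Some('\\') => ('\\', 2),
            Some('\'') => ('\'', 2),
            Some('0') => ('\0', 2),
            _ => panic!("escape not handled in this sketch"),
        },
        Some(c) => (c, 1),
        None => panic!("empty char literal"),
    }
}

fn main() {
    assert_eq!(char_lit("a"), ('a', 1));
    assert_eq!(char_lit(r"\n"), ('\n', 2));
}
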
@@ -359,15 +358,14 @@ fn char_lit(lit: &str, diag: Option<(Span, &Handler)>) -> (char, isize) {
     }
 }
 
-/// Parse a string representing a string literal into its final form. Does
-/// unescaping.
+/// Parses a string representing a string literal into its final form. Does unescaping.
 pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
     debug!("str_lit: given {}", lit.escape_default());
     let mut res = String::with_capacity(lit.len());
 
     let error = |i| format!("lexer should have rejected {} at {}", lit, i);
 
-    /// Eat everything up to a non-whitespace
+    /// Eats everything up to a non-whitespace character.
     fn eat<'a>(it: &mut iter::Peekable<str::CharIndices<'a>>) {
         loop {
             match it.peek().map(|x| x.1) {
@@ -428,7 +426,7 @@ pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
     res
 }
 
-/// Parse a string representing a raw string literal into its final form. The
+/// Parses a string representing a raw string literal into its final form. The
 /// only operation this does is convert embedded CRLF into a single LF.
 fn raw_str_lit(lit: &str) -> String {
     debug!("raw_str_lit: given {}", lit.escape_default());
@@ -554,7 +552,7 @@ fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
     filtered_float_lit(Symbol::intern(s), suffix, diag)
 }
 
-/// Parse a string representing a byte literal into its final form. Similar to `char_lit`
+/// Parses a string representing a byte literal into its final form. Similar to `char_lit`.
 fn byte_lit(lit: &str) -> (u8, usize) {
     let err = |i| format!("lexer accepted invalid byte literal {} step {}", lit, i);
 
@@ -591,7 +589,7 @@ fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> {
 
     let error = |i| panic!("lexer should have rejected {} at {}", lit, i);
 
-    /// Eat everything up to a non-whitespace
+    /// Eats everything up to a non-whitespace character.
     fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
         loop {
             match it.peek().map(|x| x.1) {
@@ -758,10 +756,11 @@ fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
     })
 }
 
-/// `SeqSep` : a sequence separator (token)
-/// and whether a trailing separator is allowed.
+/// A sequence separator.
 pub struct SeqSep {
+    /// The separator token.
     pub sep: Option<token::Token>,
+    /// `true` if a trailing separator is allowed.
     pub trailing_sep_allowed: bool,
 }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 69d6407d506..67154305735 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -75,7 +75,7 @@ bitflags::bitflags! {
 
 type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);
 
-/// How to parse a path.
+/// Specifies how to parse a path.
 #[derive(Copy, Clone, PartialEq)]
 pub enum PathStyle {
     /// In some contexts, notably in expressions, paths with generic arguments are ambiguous
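
The ambiguity being referred to is the classic turbofish situation: in expression position, a bare `<` could open a generic-argument list or be a comparison, so generic arguments there need the `::<` form.

fn main() {
    let v = Vec::<u8>::new();     // expression position: turbofish required
    // let v = Vec<u8>::new();    // error: parsed as chained comparisons
    let w: Vec<u8> = Vec::new();  // type position: plain `<` is unambiguous
    assert_eq!(v.len() + w.len(), 0);
}
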
@@ -111,7 +111,7 @@ enum BlockMode {
     Ignore,
 }
 
-/// Possibly accept an `token::Interpolated` expression (a pre-parsed expression
+/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
 /// dropped into the token stream, which happens while parsing the result of
 /// macro expansion). Placement of these is not as complex as I feared it would
 /// be. The important thing is to make sure that lookahead doesn't balk at
@@ -420,11 +420,11 @@ impl TokenType {
     }
 }
 
-/// Returns true if `IDENT t` can start a type - `IDENT::a::b`, `IDENT<u8, u8>`,
+/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
 /// `IDENT<<u8 as Trait>::AssocTy>`.
 ///
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
-/// that IDENT is not the ident of a fn trait
+/// that `IDENT` is not the ident of a fn trait.
 fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
     t == &token::ModSep || t == &token::Lt ||
     t == &token::BinOp(token::Shl)
@@ -525,7 +525,7 @@ impl From<P<Expr>> for LhsExpr {
     }
 }
 
-/// Create a placeholder argument.
+/// Creates a placeholder argument.
 fn dummy_arg(span: Span) -> Arg {
     let ident = Ident::new(keywords::Invalid.name(), span);
     let pat = P(Pat {
@@ -614,7 +614,7 @@ impl<'a> Parser<'a> {
         next
     }
 
-    /// Convert the current token to a string using self's reader
+    /// Converts the current token to a string using `self`'s reader.
     pub fn this_token_to_string(&self) -> String {
         pprust::token_to_string(&self.token)
     }
@@ -649,8 +649,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume the token t. Signal an error if
-    /// the next token is not t.
+    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
     pub fn expect(&mut self, t: &token::Token) -> PResult<'a,  bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
@@ -867,7 +866,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// returns the span of expr, if it was not interpolated or the span of the interpolated token
+    /// Returns the span of `expr` if it was not interpolated, or the span of the interpolated token.
     fn interpolated_or_expr_span(&self,
                                  expr: PResult<'a, P<Expr>>)
                                  -> PResult<'a, (Span, P<Expr>)> {
@@ -941,7 +940,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Check if the next token is `tok`, and return `true` if so.
+    /// Checks if the next token is `tok`, and returns `true` if so.
     ///
     /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
     /// encountered.
@@ -951,8 +950,7 @@ impl<'a> Parser<'a> {
         is_present
     }
 
-    /// Consume token 'tok' if it exists. Returns true if the given
-    /// token was present, false otherwise.
+    /// Consumes the token `tok` if it exists. Returns whether the given token was present.
     pub fn eat(&mut self, tok: &token::Token) -> bool {
         let is_present = self.check(tok);
         if is_present { self.bump() }
@@ -964,8 +962,8 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw)
     }
 
-    /// If the next token is the given keyword, eat it and return
-    /// true. Otherwise, return false.
+    /// If the next token is the given keyword, eats it and returns
+    /// `true`. Otherwise, returns `false`.
     pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
         if self.check_keyword(kw) {
             self.bump();
@@ -984,9 +982,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// If the given word is not a keyword, signal an error.
-    /// If the next token is not the given word, signal an error.
-    /// Otherwise, eat it.
+    /// If the given word is not a keyword, signals an error.
+    /// If the next token is not the given word, signals an error.
+    /// Otherwise, eats it.
     fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
         if !self.eat_keyword(kw) {
             self.unexpected()
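
The `check`/`eat`/`expect` trio above follows a layered pattern: `check` is a pure lookahead, `eat` consumes on match, and `expect` turns a failed `eat` into an error. A toy version of the same layering (stand-in token type, `String` errors instead of `PResult`):

#[derive(Clone, PartialEq, Debug)]
enum Tok { Plus, Ident, Eof }

struct P { toks: Vec<Tok>, pos: usize }

impl P {
    fn check(&self, t: &Tok) -> bool {
        self.toks.get(self.pos) == Some(t)
    }
    fn eat(&mut self, t: &Tok) -> bool {
        let present = self.check(t);
        if present { self.pos += 1; }
        present
    }
    fn expect(&mut self, t: &Tok) -> Result<(), String> {
        if self.eat(t) { Ok(()) } else { Err(format!("expected {:?}", t)) }
    }
}

fn main() {
    let mut p = P { toks: vec![Tok::Ident, Tok::Plus, Tok::Eof], pos: 0 };
    assert!(p.eat(&Tok::Ident));
    p.expect(&Tok::Plus).unwrap();
    assert!(p.check(&Tok::Eof));
}
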
@@ -1031,11 +1029,11 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume a `+`. if `+=` is seen, replace it with a `=`
-    /// and continue. If a `+` is not seen, return false.
+    /// Expects and consumes a `+`. If `+=` is seen, replaces it with a `=`
+    /// and continues. If a `+` is not seen, returns `false`.
     ///
-    /// This is using when token splitting += into +.
-    /// See issue 47856 for an example of when this may occur.
+    /// This is used when token-splitting `+=` into `+`.
+    /// See issue #47856 for an example of when this may occur.
     fn eat_plus(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
         match self.token {
@@ -1054,7 +1052,7 @@ impl<'a> Parser<'a> {
 
 
     /// Checks to see if the next token is either `+` or `+=`.
-    /// Otherwise returns false.
+    /// Otherwise returns `false`.
     fn check_plus(&mut self) -> bool {
         if self.token.is_like_plus() {
             true
@@ -1065,8 +1063,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume an `&`. If `&&` is seen, replace it with a single
-    /// `&` and continue. If an `&` is not seen, signal an error.
+    /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single
+    /// `&` and continues. If an `&` is not seen, signals an error.
     fn expect_and(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
         match self.token {
@@ -1082,8 +1080,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume an `|`. If `||` is seen, replace it with a single
-    /// `|` and continue. If an `|` is not seen, signal an error.
+    /// Expects and consumes an `|`. If `||` is seen, replaces it with a single
+    /// `|` and continues. If an `|` is not seen, signals an error.
     fn expect_or(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
         match self.token {
@@ -1115,9 +1113,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Attempt to consume a `<`. If `<<` is seen, replace it with a single
-    /// `<` and continue. If `<-` is seen, replace it with a single `<`
-    /// and continue. If a `<` is not seen, return false.
+    /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
+    /// `<` and continues. If `<-` is seen, replaces it with a single `<`
+    /// and continues. If a `<` is not seen, returns `false`.
     ///
     /// This is meant to be used when parsing generics on a path to get the
     /// starting token.
@@ -1159,9 +1157,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume a GT. if a >> is seen, replace it
-    /// with a single > and continue. If a GT is not seen,
-    /// signal an error.
+    /// Expects and consumes a single `>` token. If a `>>` is seen, replaces it
+    /// with a single `>` and continues. If a `>` is not seen, signals an error.
     fn expect_gt(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::Gt));
         let ate = match self.token {
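
The `>>` case arises whenever two generic-argument lists close at once: the lexer emits one `>>` token for the end of the type below, and the parser splits it into two `>`s.

fn main() {
    let nested: Vec<Vec<u8>> = Vec::new(); // the `>>` here is a single token
    assert!(nested.is_empty());
}
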
@@ -1196,7 +1193,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Eat and discard tokens until one of `kets` is encountered. Respects token trees,
+    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
     fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
         let handler = self.diagnostic();
@@ -1209,8 +1206,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse a sequence, including the closing delimiter. The function
-    /// f must consume tokens until reaching the next separator or
+    /// Parses a sequence, including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     pub fn parse_seq_to_end<T, F>(&mut self,
                                   ket: &token::Token,
@@ -1226,8 +1223,8 @@ impl<'a> Parser<'a> {
         Ok(val)
     }
 
-    /// Parse a sequence, not including the closing delimiter. The function
-    /// f must consume tokens until reaching the next separator or
+    /// Parses a sequence, not including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     pub fn parse_seq_to_before_end<T, F>(
         &mut self,
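
These `parse_seq_*` helpers pair with the `SeqSep` struct from `parse/mod.rs`. A self-contained sketch of the contract, under toy tokens and `String` errors: consume items separated by a separator up to the closing delimiter, accepting a trailing separator.

#[derive(Debug)]
enum Tok { Item(i32), Comma, Close }

fn parse_seq_to_end(toks: &[Tok]) -> Result<Vec<i32>, String> {
    let mut out = Vec::new();
    let mut i = 0;
    loop {
        match toks.get(i) {
            Some(Tok::Close) => return Ok(out), // covers a trailing separator
            Some(Tok::Item(n)) => { out.push(*n); i += 1; }
            other => return Err(format!("expected item, found {:?}", other)),
        }
        match toks.get(i) {
            Some(Tok::Comma) => i += 1,
            Some(Tok::Close) => return Ok(out),
            other => return Err(format!("expected `,` or close, found {:?}", other)),
        }
    }
}

fn main() {
    use Tok::*;
    let toks = [Item(1), Comma, Item(2), Comma, Close]; // trailing comma ok
    assert_eq!(parse_seq_to_end(&toks), Ok(vec![1, 2]));
}
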
@@ -1311,8 +1308,8 @@ impl<'a> Parser<'a> {
         Ok((v, recovered))
     }
 
-    /// Parse a sequence, including the closing delimiter. The function
-    /// f must consume tokens until reaching the next separator or
+    /// Parses a sequence, including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     fn parse_unspanned_seq<T, F>(
         &mut self,
@@ -1429,15 +1426,14 @@ impl<'a> Parser<'a> {
         &self.sess.span_diagnostic
     }
 
-    /// Is the current token one of the keywords that signals a bare function
-    /// type?
+    /// Is the current token one of the keywords that signals a bare function type?
     fn token_is_bare_fn_keyword(&mut self) -> bool {
         self.check_keyword(keywords::Fn) ||
             self.check_keyword(keywords::Unsafe) ||
             self.check_keyword(keywords::Extern)
     }
 
-    /// parse a `TyKind::BareFn` type:
+    /// Parses a `TyKind::BareFn` type.
     fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
         /*
 
@@ -1474,7 +1470,7 @@ impl<'a> Parser<'a> {
         })))
     }
 
-    /// Parse asyncness: `async` or nothing
+    /// Parses asyncness: `async` or nothing.
     fn parse_asyncness(&mut self) -> IsAsync {
         if self.eat_keyword(keywords::Async) {
             IsAsync::Async {
@@ -1486,7 +1482,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse unsafety: `unsafe` or nothing.
+    /// Parses unsafety: `unsafe` or nothing.
     fn parse_unsafety(&mut self) -> Unsafety {
         if self.eat_keyword(keywords::Unsafe) {
             Unsafety::Unsafe
@@ -1495,7 +1491,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse the items in a trait declaration
+    /// Parses the items in a trait declaration.
     pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
         maybe_whole!(self, NtTraitItem, |x| x);
         let attrs = self.parse_outer_attributes()?;
@@ -1612,7 +1608,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse optional return type [ -> TY ] in function decl
+    /// Parses an optional return type `[ -> TY ]` in a function declaration.
     fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
         if self.eat(&token::RArrow) {
             Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?))
@@ -1621,12 +1617,13 @@ impl<'a> Parser<'a> {
         }
     }
 
-    // Parse a type
+    /// Parses a type.
     pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
         self.parse_ty_common(true, true)
     }
 
-    /// Parse a type in restricted contexts where `+` is not permitted.
+    /// Parses a type in restricted contexts where `+` is not permitted.
+    ///
     /// Example 1: `&'a TYPE`
     ///     `+` is prohibited to maintain operator priority (P(+) < P(&)).
     /// Example 2: `value1 as TYPE + value2`
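
Both restrictions are visible in ordinary code:

use std::fmt::Debug;

fn main() {
    let x = 1u8;
    // Example 1: inside `&`, a `+` bound must be parenthesized, since
    // P(+) < P(&) would otherwise split the type apart:
    let r: &(dyn Debug + Send) = &x;
    println!("{:?}", r);

    // Example 2: after `as`, `+` is addition, not part of the type:
    let n = 1 as i64 + 2; // reads as `(1 as i64) + 2`
    assert_eq!(n, 3);
}
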
@@ -1929,7 +1926,8 @@ impl<'a> Parser<'a> {
         self.look_ahead(offset + 1, |t| t == &token::Colon)
     }
 
-    /// Skip unexpected attributes and doc comments in this position and emit an appropriate error.
+    /// Skips unexpected attributes and doc comments in this position and emits an appropriate
+    /// error.
     fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
         if let token::DocComment(_) = self.token {
             let mut err = self.diagnostic().struct_span_err(
@@ -1958,8 +1956,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// This version of parse arg doesn't necessarily require
-    /// identifier names.
+    /// This version of `parse_arg` doesn't necessarily require identifier names.
     fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> {
         maybe_whole!(self, NtArg, |x| x);
 
@@ -2067,12 +2064,12 @@ impl<'a> Parser<'a> {
         Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID })
     }
 
-    /// Parse a single function argument
+    /// Parses a single function argument.
     crate fn parse_arg(&mut self) -> PResult<'a, Arg> {
         self.parse_arg_general(true, false)
     }
 
-    /// Parse an argument in a lambda header e.g., |arg, arg|
+    /// Parses an argument in a lambda header (e.g., `|arg, arg|`).
     fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
         let pat = self.parse_pat(Some("argument name"))?;
         let t = if self.eat(&token::Colon) {
@@ -2099,7 +2096,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Matches token_lit = LIT_INTEGER | ...
+    /// Matches `token_lit = LIT_INTEGER | ...`.
     fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
         let out = match self.token {
             token::Interpolated(ref nt) => match nt.0 {
@@ -2165,7 +2162,7 @@ impl<'a> Parser<'a> {
         Ok(out)
     }
 
-    /// Matches lit = true | false | token_lit
+    /// Matches `lit = true | false | token_lit`.
     crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
         let lo = self.span;
         let lit = if self.eat_keyword(keywords::True) {
@@ -2179,7 +2176,7 @@ impl<'a> Parser<'a> {
         Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) })
     }
 
-    /// matches '-' lit | lit (cf. ast_validation::AstValidator::check_expr_within_pat)
+    /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
     crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
 
@@ -2221,7 +2218,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parses qualified path.
+    /// Parses a qualified path.
     /// Assumes that the leading `<` has been parsed already.
     ///
     /// `qualified_path = <type [as trait_ref]>::path`
@@ -2297,8 +2294,9 @@ impl<'a> Parser<'a> {
         Ok(ast::Path { segments, span: lo.to(self.prev_span) })
     }
 
-    /// Like `parse_path`, but also supports parsing `Word` meta items into paths for back-compat.
-    /// This is used when parsing derive macro paths in `#[derive]` attributes.
+    /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
+    /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
+    /// attributes.
     pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
         let meta_ident = match self.token {
             token::Interpolated(ref nt) => match nt.0 {
@@ -2423,7 +2421,7 @@ impl<'a> Parser<'a> {
         self.token.is_lifetime()
     }
 
-    /// Parse single lifetime 'a or panic.
+    /// Parses a single lifetime `'a` or panics.
     crate fn expect_lifetime(&mut self) -> Lifetime {
         if let Some(ident) = self.token.lifetime() {
             let span = self.span;
@@ -2444,7 +2442,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse mutability (`mut` or nothing).
+    /// Parses mutability (`mut` or nothing).
     fn parse_mutability(&mut self) -> Mutability {
         if self.eat_keyword(keywords::Mut) {
             Mutability::Mutable
@@ -2575,12 +2573,10 @@ impl<'a> Parser<'a> {
     }
 
     /// At the bottom (top?) of the precedence hierarchy,
-    /// parse things like parenthesized exprs,
-    /// macros, return, etc.
+    /// parses things like parenthesized exprs, macros, `return`, etc.
     ///
-    /// N.B., this does not parse outer attributes,
-    ///     and is private because it only works
-    ///     correctly if called from parse_dot_or_call_expr().
+    /// N.B., this does not parse outer attributes, and is private because it only works
+    /// correctly if called from `parse_dot_or_call_expr()`.
     fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
 
@@ -2965,7 +2961,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse a block or unsafe block
+    /// Parses a block or unsafe block.
     fn parse_block_expr(&mut self, opt_label: Option<Label>,
                             lo: Span, blk_mode: BlockCheckMode,
                             outer_attrs: ThinVec<Attribute>)
@@ -2979,7 +2975,7 @@ impl<'a> Parser<'a> {
         return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
     }
 
-    /// parse a.b or a(13) or a[4] or just a
+    /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
     fn parse_dot_or_call_expr(&mut self,
                                   already_parsed_attrs: Option<ThinVec<Attribute>>)
                                   -> PResult<'a, P<Expr>> {
@@ -3287,7 +3283,7 @@ impl<'a> Parser<'a> {
         self.span = span;
     }
 
-    /// parse a single token tree from the input.
+    /// Parses a single token tree from the input.
     crate fn parse_token_tree(&mut self) -> TokenTree {
         match self.token {
             token::OpenDelim(..) => {
@@ -3447,7 +3443,7 @@ impl<'a> Parser<'a> {
         return Ok(self.mk_expr(lo.to(hi), ex, attrs));
     }
 
-    /// Parse an associative expression
+    /// Parses an associative expression.
     ///
     /// This parses an expression accounting for associativity and precedence of the operators in
     /// the expression.
@@ -3458,7 +3454,7 @@ impl<'a> Parser<'a> {
         self.parse_assoc_expr_with(0, already_parsed_attrs.into())
     }
 
-    /// Parse an associative expression with operators of at least `min_prec` precedence
+    /// Parses an associative expression with operators of at least `min_prec` precedence.
     fn parse_assoc_expr_with(&mut self,
                                  min_prec: usize,
                                  lhs: LhsExpr)
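
The `min_prec` parameter drives a standard precedence-climbing loop. A compact sketch over a toy grammar (single-digit atoms, left-associative `+` and `*`; all names here are stand-ins, not the real parser's):

fn prec(op: char) -> Option<u8> {
    match op {
        '+' => Some(1),
        '*' => Some(2),
        _ => None,
    }
}

fn parse_assoc_expr_with(toks: &[char], pos: &mut usize, min_prec: u8) -> i64 {
    let mut lhs = i64::from(toks[*pos] as u8 - b'0'); // atom = one digit
    *pos += 1;
    // Fold operators binding at least as tightly as `min_prec`;
    // recursing with `p + 1` yields left associativity.
    while let Some(&op) = toks.get(*pos) {
        let p = match prec(op) {
            Some(p) if p >= min_prec => p,
            _ => break,
        };
        *pos += 1;
        let rhs = parse_assoc_expr_with(toks, pos, p + 1);
        lhs = if op == '+' { lhs + rhs } else { lhs * rhs };
    }
    lhs
}

fn main() {
    let toks: Vec<char> = "1+2*3+4".chars().collect();
    assert_eq!(parse_assoc_expr_with(&toks, &mut 0, 0), 11);
}
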
@@ -3793,7 +3789,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse an 'if' or 'if let' expression ('if' token already eaten)
+    /// Parses an `if` or `if let` expression (`if` token already eaten).
     fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         if self.check_keyword(keywords::Let) {
             return self.parse_if_let_expr(attrs);
@@ -3829,7 +3825,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
     }
 
-    /// Parse an 'if let' expression ('if' token already eaten)
+    /// Parses an `if let` expression (`if` token already eaten).
     fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
                              -> PResult<'a, P<Expr>> {
         let lo = self.prev_span;
@@ -3847,7 +3843,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, expr, thn, els), attrs))
     }
 
-    // `move |args| expr`
+    /// Parses `move |args| expr`.
     fn parse_lambda_expr(&mut self,
                              attrs: ThinVec<Attribute>)
                              -> PResult<'a, P<Expr>>
@@ -3943,7 +3939,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
     }
 
-    /// Parse a 'while' or 'while let' expression ('while' token already eaten)
+    /// Parses a `while` or `while let` expression (`while` token already eaten).
     fn parse_while_expr(&mut self, opt_label: Option<Label>,
                             span_lo: Span,
                             mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
@@ -3957,7 +3953,7 @@ impl<'a> Parser<'a> {
         return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
     }
 
-    /// Parse a 'while let' expression ('while' token already eaten)
+    /// Parses a `while let` expression (`while` token already eaten).
     fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
                                 span_lo: Span,
                                 mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
@@ -3981,7 +3977,7 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
     }
 
-    /// Parse an `async move {...}` expression
+    /// Parses an `async move {...}` expression.
     pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>)
         -> PResult<'a, P<Expr>>
     {
@@ -3999,7 +3995,7 @@ impl<'a> Parser<'a> {
             ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
     }
 
-    /// Parse a `try {...}` expression (`try` token already eaten)
+    /// Parses a `try {...}` expression (`try` token already eaten).
     fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
         -> PResult<'a, P<Expr>>
     {
@@ -4117,15 +4113,15 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse an expression
+    /// Parses an expression.
     #[inline]
     pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
         self.parse_expr_res(Restrictions::empty(), None)
     }
 
-    /// Evaluate the closure with restrictions in place.
+    /// Evaluates the closure with restrictions in place.
     ///
-    /// After the closure is evaluated, restrictions are reset.
+    /// After the closure is evaluated, restrictions are reset.
     fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
         where F: FnOnce(&mut Self) -> T
     {
@@ -4137,7 +4133,7 @@ impl<'a> Parser<'a> {
 
     }
 
-    /// Parse an expression, subject to the given restrictions
+    /// Parses an expression, subject to the given restrictions.
     #[inline]
     fn parse_expr_res(&mut self, r: Restrictions,
                           already_parsed_attrs: Option<ThinVec<Attribute>>)
@@ -4145,7 +4141,7 @@ impl<'a> Parser<'a> {
         self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
     }
 
-    /// Parse the RHS of a local variable declaration (e.g., '= 14;')
+    /// Parses the RHS of a local variable declaration (e.g., `= 14;`).
     fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
         if self.eat(&token::Eq) {
             Ok(Some(self.parse_expr()?))
@@ -4156,7 +4152,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse patterns, separated by '|' s
+    /// Parses patterns, separated by `|`s.
     fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
         // Allow a '|' before the pats (RFC 1925 + RFC 2530)
         self.eat(&token::BinOp(token::Or));
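For orientation, here is an editor's sketch (not part of this patch) of the surface syntax this method accepts: `|`-separated patterns, with the optional leading `|` the comment above refers to.

```rust
fn main() {
    let x = 2;
    match x {
        // A leading `|` is accepted before the first pattern.
        | 1 | 2 | 3 => println!("small"),
        _ => println!("big"),
    }
}
```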
@@ -4346,7 +4342,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse the fields of a struct-like pattern
+    /// Parses the fields of a struct-like pattern.
     fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> {
         let mut fields = Vec::new();
         let mut etc = false;
@@ -4538,13 +4534,13 @@ impl<'a> Parser<'a> {
         Ok(pat)
     }
 
-    /// Parse a pattern.
+    /// Parses a pattern.
     pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
         self.parse_pat_with_range_pat(true, expected)
     }
 
-    /// Parse a pattern, with a setting whether modern range patterns e.g., `a..=b`, `a..b` are
-    /// allowed.
+    /// Parses a pattern, with a setting that controls whether modern range patterns (e.g.,
+    /// `a..=b`, `a..b`) are allowed.
     fn parse_pat_with_range_pat(
         &mut self,
         allow_range_pat: bool,
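As a quick illustration (an editor's sketch, not from this patch) of the "modern" range patterns the flag controls; `a..=b` is stable, while exclusive `a..b` patterns were still feature-gated at the time.

```rust
fn main() {
    match 5u8 {
        // An inclusive range pattern (`a..=b`).
        0..=4 => println!("low"),
        _ => println!("high"),
    }
}
```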
@@ -4754,9 +4750,9 @@ impl<'a> Parser<'a> {
         Ok(P(pat))
     }
 
-    /// Parse ident or ident @ pat
+    /// Parses `ident` or `ident @ pat`.
-    /// used by the copy foo and ref foo patterns to give a good
+    /// Used by the `copy foo` and `ref foo` patterns to give a good
-    /// error message when parsing mistakes like ref foo(a,b)
+    /// error message when parsing mistakes like `ref foo(a, b)`.
     fn parse_pat_ident(&mut self,
                        binding_mode: ast::BindingMode)
                        -> PResult<'a, PatKind> {
@@ -4782,7 +4778,7 @@ impl<'a> Parser<'a> {
         Ok(PatKind::Ident(binding_mode, ident, sub))
     }
 
-    /// Parse a local variable declaration
+    /// Parses a local variable declaration.
     fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
         let lo = self.prev_span;
         let pat = self.parse_top_level_pat()?;
@@ -4855,7 +4851,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Parse a structure field
+    /// Parses a structure field.
     fn parse_name_and_ty(&mut self,
                          lo: Span,
                          vis: Visibility,
@@ -4874,7 +4870,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Emit an expected item after attributes error.
+    /// Emits an expected-item-after-attributes error.
     fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a,  ()> {
         let message = match attrs.last() {
             Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
@@ -5297,13 +5293,13 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Is this expression a successfully-parsed statement?
+    /// Checks if this expression is a successfully parsed statement.
     fn expr_is_complete(&mut self, e: &Expr) -> bool {
         self.restrictions.contains(Restrictions::STMT_EXPR) &&
             !classify::expr_requires_semi_to_be_stmt(e)
     }
 
-    /// Parse a block. No inner attrs are allowed.
+    /// Parses a block. No inner attributes are allowed.
     pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
         maybe_whole!(self, NtBlock, |x| x);
 
@@ -5381,7 +5377,7 @@ impl<'a> Parser<'a> {
         self.parse_block_tail(lo, BlockCheckMode::Default)
     }
 
-    /// Parse a block. Inner attrs are allowed.
+    /// Parses a block. Inner attributes are allowed.
     fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
         maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
 
@@ -5391,7 +5387,7 @@ impl<'a> Parser<'a> {
             self.parse_block_tail(lo, BlockCheckMode::Default)?))
     }
 
-    /// Parse the rest of a block expression or function body
+    /// Parses the rest of a block expression or function body.
-    /// Precondition: already parsed the '{'.
+    /// Precondition: already parsed the `{`.
     fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
         let mut stmts = vec![];
@@ -5425,7 +5421,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Parse a statement, including the trailing semicolon.
+    /// Parses a statement, including the trailing semicolon.
     crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
         // skip looking for a trailing semicolon when we have an interpolated statement
         maybe_whole!(self, NtStmt, |x| Some(x));
@@ -5487,11 +5483,14 @@ impl<'a> Parser<'a> {
         ).emit();
     }
 
-    // Parse bounds of a type parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
-    // BOUND = TY_BOUND | LT_BOUND
-    // LT_BOUND = LIFETIME (e.g., `'a`)
-    // TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
-    // TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
+    /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
+    ///
+    /// ```text
+    /// BOUND = TY_BOUND | LT_BOUND
+    /// LT_BOUND = LIFETIME (e.g., `'a`)
+    /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
+    /// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
+    /// ```
     fn parse_generic_bounds_common(&mut self, allow_plus: bool) -> PResult<'a, GenericBounds> {
         let mut bounds = Vec::new();
         loop {
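For reference, an editor's sketch (not part of this patch) of source-level bound syntax covered by the grammar above: a maybe-bound (`?Sized`), a higher-ranked trait bound, and a lifetime bound on one type parameter.

```rust
trait Trait<'x> {}

// `T: ?Sized + for<'a> Trait<'a> + 'static` exercises all three
// bound forms from the grammar.
fn f<T>(_t: &T)
where
    T: ?Sized + for<'a> Trait<'a> + 'static,
{
}

fn main() {}
```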
@@ -5545,8 +5544,11 @@ impl<'a> Parser<'a> {
         self.parse_generic_bounds_common(true)
     }
 
-    // Parse bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
-    // BOUND = LT_BOUND (e.g., `'a`)
+    /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
+    ///
+    /// ```text
+    /// BOUND = LT_BOUND (e.g., `'a`)
+    /// ```
     fn parse_lt_param_bounds(&mut self) -> GenericBounds {
         let mut lifetimes = Vec::new();
         while self.check_lifetime() {
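A small sketch (editor's illustration, not from the patch) of lifetime-parameter bounds in source form: here `'b` must outlive both `'a` and `'c`.

```rust
// `'b: 'a + 'c` is an LT_BOUND list on the lifetime parameter `'b`.
fn pick<'a, 'c, 'b: 'a + 'c>(x: &'b u8) -> &'a u8 {
    x
}

fn main() {
    assert_eq!(*pick(&7), 7);
}
```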
@@ -5559,7 +5561,7 @@ impl<'a> Parser<'a> {
         lifetimes
     }
 
-    /// Matches typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?
+    /// Matches `typaram = IDENT ("?" unbound)? optbounds (EQ ty)?`.
     fn parse_ty_param(&mut self,
                       preceding_attrs: Vec<Attribute>)
                       -> PResult<'a, GenericParam> {
@@ -5590,6 +5592,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the following grammar:
+    ///
     ///     TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
     fn parse_trait_item_assoc_ty(&mut self)
         -> PResult<'a, (Ident, TraitItemKind, ast::Generics)> {
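An editor's sketch (not part of this patch) of associated-type declarations matched by this grammar: a bare identifier and an identifier with bounds. The optional `= Ty` default was still unstable at the time, so it is omitted here.

```rust
trait Container {
    type Item;                      // Ident
    type Iter: Iterator<Item = u8>; // Ident ":" GenericBounds
}

fn main() {}
```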
@@ -5631,8 +5634,8 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parses (possibly empty) list of lifetime and type parameters, possibly including
-    /// trailing comma and erroneous trailing attributes.
+    /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
+    /// a trailing comma and erroneous trailing attributes.
     crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
         let mut params = Vec::new();
         loop {
@@ -5690,7 +5693,7 @@ impl<'a> Parser<'a> {
         Ok(params)
     }
 
-    /// Parse a set of optional generic type parameter declarations. Where
+    /// Parses a set of optional generic type parameter declarations. Where
     /// clauses are not parsed here, and must be added later via
     /// `parse_where_clause()`.
     ///
@@ -5718,7 +5721,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse generic args (within a path segment) with recovery for extra leading angle brackets.
+    /// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
     /// For the purposes of understanding the parsing logic of generic arguments, this function
     /// can be thought of being the same as just calling `self.parse_generic_args()` if the source
     /// had the correct amount of leading angle brackets.
@@ -5952,7 +5955,7 @@ impl<'a> Parser<'a> {
         Ok((args, bindings))
     }
 
-    /// Parses an optional `where` clause and places it in `generics`.
+    /// Parses an optional where-clause and places it in `generics`.
     ///
     /// ```ignore (only-for-syntax-highlight)
     /// where T : Trait<U, V> + 'b, 'a : 'b
@@ -6116,7 +6119,7 @@ impl<'a> Parser<'a> {
         Ok((args, variadic))
     }
 
-    /// Parse the argument list and result type of a function declaration
+    /// Parses the argument list and result type of a function declaration.
     fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>> {
 
         let (args, variadic) = self.parse_fn_args(true, allow_variadic)?;
@@ -6238,7 +6241,7 @@ impl<'a> Parser<'a> {
         Ok(Some(Arg::from_self(eself, eself_ident)))
     }
 
-    /// Parse the parameter list and result type of a function that may have a `self` parameter.
+    /// Parses the parameter list and result type of a function that may have a `self` parameter.
     fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>>
         where F: FnMut(&mut Parser<'a>) -> PResult<'a,  Arg>,
     {
@@ -6276,7 +6279,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    // parse the |arg, arg| header on a lambda
+    /// Parses the `|arg, arg|` header of a closure.
     fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
         let inputs_captures = {
             if self.eat(&token::OrOr) {
@@ -6302,7 +6305,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Parse the name and optional generic types of a function header.
+    /// Parses the name and optional generic types of a function header.
     fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> {
         let id = self.parse_ident()?;
         let generics = self.parse_generics()?;
@@ -6322,7 +6325,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse an item-position function declaration.
+    /// Parses an item-position function declaration.
     fn parse_item_fn(&mut self,
                      unsafety: Unsafety,
                      asyncness: IsAsync,
@@ -6337,21 +6340,22 @@ impl<'a> Parser<'a> {
         Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs)))
     }
 
-    /// true if we are looking at `const ID`, false for things like `const fn` etc
+    /// Returns `true` if we are looking at `const ID`
+    /// (returns `false` for things like `const fn`, etc.).
     fn is_const_item(&mut self) -> bool {
         self.token.is_keyword(keywords::Const) &&
             !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
             !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
     }
 
-    /// parses all the "front matter" for a `fn` declaration, up to
+    /// Parses all the "front matter" for a `fn` declaration, up to
     /// and including the `fn` keyword:
     ///
     /// - `const fn`
     /// - `unsafe fn`
     /// - `const unsafe fn`
     /// - `extern fn`
-    /// - etc
+    /// - etc.
     fn parse_fn_front_matter(&mut self)
         -> PResult<'a, (
             Spanned<Constness>,
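For illustration (an editor's sketch, not from this patch), the front-matter combinations listed above in source form:

```rust
const fn a() -> u8 { 1 }
unsafe fn b() {}
const unsafe fn c() -> u8 { 2 }
extern "C" fn d() {}

fn main() {
    let _ = a();
}
```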
@@ -6378,7 +6382,7 @@ impl<'a> Parser<'a> {
         Ok((constness, unsafety, asyncness, abi))
     }
 
-    /// Parse an impl item.
+    /// Parses an impl item.
     pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
         maybe_whole!(self, NtImplItem, |x| x);
         let attrs = self.parse_outer_attributes()?;
@@ -6517,7 +6521,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse `trait Foo { ... }` or `trait Foo = Bar;`
+    /// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
     fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
         let ident = self.parse_ident()?;
         let mut tps = self.parse_generics()?;
@@ -6608,9 +6612,11 @@ impl<'a> Parser<'a> {
     }
 
-    /// Parses an implementation item, `impl` keyword is already parsed.
+    /// Parses an implementation item; the `impl` keyword is already parsed.
+    ///
     ///    impl<'a, T> TYPE { /* impl items */ }
     ///    impl<'a, T> TRAIT for TYPE { /* impl items */ }
     ///    impl<'a, T> !TRAIT for TYPE { /* impl items */ }
+    ///
     /// We actually parse slightly more relaxed grammar for better error reporting and recovery.
     ///     `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}`
     ///     `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}`
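An editor's sketch (not part of this patch) of the three `impl` shapes from the doc comment; the negative form is shown commented out since it was feature-gated outside of auto traits.

```rust
struct Foo<T>(T);
trait Bar {}

impl<T> Foo<T> {}         // inherent impl
impl<T> Bar for Foo<T> {} // trait impl
// impl<T> !Bar for Foo<T> {} // negative impl (feature-gated)

fn main() {}
```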
@@ -6702,7 +6708,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse struct Foo { ... }
+    /// Parses `struct Foo { ... }`.
     fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
         let class_name = self.parse_ident()?;
 
@@ -6756,7 +6762,7 @@ impl<'a> Parser<'a> {
         Ok((class_name, ItemKind::Struct(vdata, generics), None))
     }
 
-    /// Parse union Foo { ... }
+    /// Parses `union Foo { ... }`.
     fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
         let class_name = self.parse_ident()?;
 
@@ -6850,7 +6856,7 @@ impl<'a> Parser<'a> {
         Ok(fields)
     }
 
-    /// Parse a structure field declaration
+    /// Parses a structure field declaration.
     fn parse_single_struct_field(&mut self,
                                      lo: Span,
                                      vis: Visibility,
@@ -6912,7 +6918,7 @@ impl<'a> Parser<'a> {
         Ok(a_var)
     }
 
-    /// Parse an element of a struct definition
+    /// Parses an element of a struct declaration.
     fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
         let attrs = self.parse_outer_attributes()?;
         let lo = self.span;
@@ -6920,11 +6926,11 @@ impl<'a> Parser<'a> {
         self.parse_single_struct_field(lo, vis, attrs)
     }
 
-    /// Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
+    /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
     /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
-    /// If the following element can't be a tuple (i.e., it's a function definition,
-    /// it's not a tuple struct field) and the contents within the parens
-    /// isn't valid, emit a proper diagnostic.
+    /// If the following element can't be a tuple (i.e., it's a function definition rather
+    /// than a tuple struct field) and the contents within the parentheses aren't valid,
+    /// emits a proper diagnostic.
     pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
         maybe_whole!(self, NtVis, |x| x);
 
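A sketch (editor's illustration, not from this patch) of the visibility forms handled here; the bare `crate` shortcut is omitted since it was still feature-gated.

```rust
mod outer {
    pub struct A;                  // `pub`
    pub(crate) struct B;           // `pub(crate)`
    pub(super) struct C;           // shortcut for `pub(in super)`
    pub(in crate::outer) struct D; // `pub(in path)` (2018-edition path syntax)
}

fn main() {}
```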
@@ -7005,7 +7011,7 @@ impl<'a> Parser<'a> {
         Ok(respan(lo, VisibilityKind::Public))
     }
 
-    /// Parse defaultness: `default` or nothing.
+    /// Parses defaultness (i.e., `default` or nothing).
     fn parse_defaultness(&mut self) -> Defaultness {
         // `pub` is included for better error messages
         if self.check_keyword(keywords::Default) &&
@@ -7054,7 +7060,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Given a termination token, parse all of the items in a module
+    /// Given a termination token, parses all of the items in a module.
     fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
         let mut items = vec![];
         while let Some(item) = self.parse_item()? {
@@ -7191,7 +7197,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Returns either a path to a module, or .
+    /// Returns a path to a module.
     pub fn default_submod_path(
         id: ast::Ident,
         relative: Option<ast::Ident>,
@@ -7334,7 +7340,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Read a module from a source file.
+    /// Reads a module from a source file.
     fn eval_src_mod(&mut self,
                     path: PathBuf,
                     directory_ownership: DirectoryOwnership,
@@ -7366,7 +7372,7 @@ impl<'a> Parser<'a> {
         Ok((m0, mod_attrs))
     }
 
-    /// Parse a function declaration from a foreign module
+    /// Parses a function declaration from a foreign module.
     fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
                              -> PResult<'a, ForeignItem> {
         self.expect_keyword(keywords::Fn)?;
@@ -7386,7 +7392,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse a static item from a foreign module.
+    /// Parses a static item from a foreign module.
     /// Assumes that the `static` keyword is already parsed.
     fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
                                  -> PResult<'a, ForeignItem> {
@@ -7406,7 +7412,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse a type from a foreign module
+    /// Parses a type from a foreign module.
     fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
                              -> PResult<'a, ForeignItem> {
         self.expect_keyword(keywords::Type)?;
@@ -7465,12 +7471,14 @@ impl<'a> Parser<'a> {
         Ok(ident)
     }
 
-    /// Parse extern crate links
+    /// Parses `extern crate` links.
     ///
     /// # Examples
     ///
+    /// ```ignore (only-for-syntax-highlight)
     /// extern crate foo;
     /// extern crate bar as foo;
+    /// ```
     fn parse_item_extern_crate(&mut self,
                                lo: Span,
                                visibility: Visibility,
@@ -7489,16 +7497,17 @@ impl<'a> Parser<'a> {
         Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs))
     }
 
-    /// Parse `extern` for foreign ABIs
-    /// modules.
+    /// Parses an `extern` block for a foreign module.
     ///
     /// `extern` is expected to have been
-    /// consumed before calling this method
+    /// consumed before calling this method.
     ///
-    /// # Examples:
+    /// # Examples
     ///
+    /// ```ignore (only-for-syntax-highlight)
     /// extern "C" {}
     /// extern {}
+    /// ```
     fn parse_item_foreign_mod(&mut self,
                               lo: Span,
                               opt_abi: Option<Abi>,
@@ -7525,11 +7534,12 @@ impl<'a> Parser<'a> {
         Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
     }
 
-    /// Parse `type Foo = Bar;`
+    /// Parses `type Foo = Bar;`
     /// or
     /// `existential type Foo: Bar;`
     /// or
-    /// `return None` without modifying the parser state
+    /// returns `None`
+    /// without modifying the parser state.
     fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
         // This parses the grammar:
         //     Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
@@ -7544,7 +7554,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse type alias or existential type
+    /// Parses a type alias or existential type.
     fn parse_existential_or_alias(
         &mut self,
         existential: bool,
@@ -7565,7 +7575,7 @@ impl<'a> Parser<'a> {
         Ok((ident, alias, tps))
     }
 
-    /// Parse the part of an "enum" decl following the '{'
+    /// Parses the part of an enum declaration following the `{`.
     fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
         let mut variants = Vec::new();
         let mut all_nullary = true;
@@ -7624,7 +7634,7 @@ impl<'a> Parser<'a> {
         Ok(ast::EnumDef { variants })
     }
 
-    /// Parse an "enum" declaration
+    /// Parses an enum declaration.
     fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
         let id = self.parse_ident()?;
         let mut generics = self.parse_generics()?;
@@ -7720,7 +7730,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// Parse one of the items allowed by the flags.
+    /// Parses one of the items allowed by the flags.
     fn parse_item_implementation(
         &mut self,
         attrs: Vec<Attribute>,
@@ -8145,7 +8155,7 @@ impl<'a> Parser<'a> {
         self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
     }
 
-    /// Parse a foreign item.
+    /// Parses a foreign item.
     crate fn parse_foreign_item(&mut self) -> PResult<'a, ForeignItem> {
         maybe_whole!(self, NtForeignItem, |ni| ni);
 
@@ -8261,7 +8271,7 @@ impl<'a> Parser<'a> {
         Ok(None)
     }
 
-    /// Parse a macro invocation inside a `trait`, `impl` or `extern` block
+    /// Parses a macro invocation inside a `trait`, `impl` or `extern` block.
     fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
                                at_end: &mut bool) -> PResult<'a, Option<Mac>>
     {
@@ -8364,13 +8374,15 @@ impl<'a> Parser<'a> {
                                    *t == token::BinOp(token::Star))
     }
 
-    /// Parse UseTree
+    /// Parses a `UseTree`.
     ///
+    /// ```text
     /// USE_TREE = [`::`] `*` |
     ///            [`::`] `{` USE_TREE_LIST `}` |
     ///            PATH `::` `*` |
     ///            PATH `::` `{` USE_TREE_LIST `}` |
     ///            PATH [`as` IDENT]
+    /// ```
     fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
         let lo = self.span;
 
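For reference, an editor's sketch (not from this patch) of inputs matching the `USE_TREE` productions above:

```rust
#![allow(unused_imports)]

use std::io::*;                  // PATH `::` `*`
use std::{fmt, mem};             // PATH `::` `{` USE_TREE_LIST `}`
use std::io::Result as IoResult; // PATH [`as` IDENT]

fn main() {}
```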
@@ -8409,9 +8421,11 @@ impl<'a> Parser<'a> {
         Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
     }
 
-    /// Parse UseTreeKind::Nested(list)
+    /// Parses a `UseTreeKind::Nested(list)`.
     ///
+    /// ```text
     /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
+    /// ```
     fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
         self.parse_unspanned_seq(&token::OpenDelim(token::Brace),
                                  &token::CloseDelim(token::Brace),
@@ -8428,8 +8442,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parses a source module as a crate. This is the main
-    /// entry point for the parser.
+    /// Parses a source module as a crate. This is the main entry point for the parser.
     pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
         let lo = self.span;
         let krate = Ok(ast::Crate {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 09924e304cf..ff7f3e0bfae 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -38,16 +38,16 @@ pub enum BinOpToken {
     Shr,
 }
 
-/// A delimiter token
+/// A delimiter token.
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum DelimToken {
-    /// A round parenthesis: `(` or `)`
+    /// A round parenthesis (i.e., `(` or `)`).
     Paren,
-    /// A square bracket: `[` or `]`
+    /// A square bracket (i.e., `[` or `]`).
     Bracket,
-    /// A curly brace: `{` or `}`
+    /// A curly brace (i.e., `{` or `}`).
     Brace,
-    /// An empty delimiter
+    /// An empty delimiter.
     NoDelim,
 }
 
@@ -172,9 +172,9 @@ pub enum Token {
     Question,
     /// Used by proc macros for representing lifetimes, not generated by lexer right now.
     SingleQuote,
-    /// An opening delimiter, eg. `{`
+    /// An opening delimiter (e.g., `{`).
     OpenDelim(DelimToken),
-    /// A closing delimiter, eg. `}`
+    /// A closing delimiter (e.g., `}`).
     CloseDelim(DelimToken),
 
     /* Literals */
@@ -188,16 +188,16 @@ pub enum Token {
     // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
     Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
     // Can be expanded into several tokens.
-    /// Doc comment
+    /// A doc comment.
     DocComment(ast::Name),
 
     // Junk. These carry no data because we don't really care about the data
     // they *would* carry, and don't really want to allocate a new ident for
     // them. Instead, users could extract that from the associated span.
 
-    /// Whitespace
+    /// Whitespace.
     Whitespace,
-    /// Comment
+    /// A comment.
     Comment,
     Shebang(ast::Name),
 
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 2d837cb565b..d8a8cbb655b 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -1,10 +1,10 @@
 //! This pretty-printer is a direct reimplementation of Philip Karlton's
 //! Mesa pretty-printer, as described in appendix A of
 //!
-//! ````text
+//! ```text
 //! STAN-CS-79-770: "Pretty Printing", by Derek C. Oppen.
 //! Stanford Department of Computer Science, 1979.
-//! ````
+//! ```
 //!
 //! The algorithm's aim is to break a stream into as few lines as possible
 //! while respecting the indentation-consistency requirements of the enclosing
diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs
index 0ec83447d52..bc43630ae59 100644
--- a/src/libsyntax/ptr.rs
+++ b/src/libsyntax/ptr.rs
@@ -1,4 +1,4 @@
-//! The AST pointer
+//! The AST pointer.
 //!
 //! Provides `P<T>`, a frozen owned smart pointer, as a replacement for `@T` in
 //! the AST.
diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs
index 552a3d30261..1784bad0362 100644
--- a/src/libsyntax/source_map.rs
+++ b/src/libsyntax/source_map.rs
@@ -26,7 +26,7 @@ use log::debug;
 
 use crate::errors::SourceMapper;
 
-/// Return the span itself if it doesn't come from a macro expansion,
+/// Returns the span itself if it doesn't come from a macro expansion,
-/// otherwise return the call site span up to the `enclosing_sp` by
+/// otherwise returns the call site span up to the `enclosing_sp` by
 /// following the `expn_info` chain.
 pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
@@ -62,7 +62,7 @@ pub trait FileLoader {
     /// Query the existence of a file.
     fn file_exists(&self, path: &Path) -> bool;
 
-    /// Return an absolute path to a file, if possible.
+    /// Returns an absolute path to a file, if possible.
     fn abs_path(&self, path: &Path) -> Option<PathBuf>;
 
     /// Read the contents of an UTF-8 file into memory.
@@ -398,7 +398,7 @@ impl SourceMap {
         }
     }
 
-    /// Returns `Some(span)`, a union of the lhs and rhs span.  The lhs must precede the rhs. If
+    /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If
     /// there are gaps between lhs and rhs, the resulting union will cross these gaps.
     /// For this to work, the spans have to be:
     ///
@@ -511,7 +511,7 @@ impl SourceMap {
         Ok(FileLines {file: lo.file, lines: lines})
     }
 
-    /// Extract the source surrounding the given `Span` using the `extract_source` function. The
+    /// Extracts the source surrounding the given `Span` using the `extract_source` function. The
     /// extract function takes three arguments: a string slice containing the source, an index in
     /// the slice for the beginning of the span and an index in the slice for the end of the span.
     fn span_to_source<F>(&self, sp: Span, extract_source: F) -> Result<String, SpanSnippetError>
@@ -561,7 +561,7 @@ impl SourceMap {
         }
     }
 
-    /// Return the source snippet as `String` corresponding to the given `Span`
+    /// Returns the source snippet as `String` corresponding to the given `Span`.
     pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
         self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index]
                                                                 .to_string())
@@ -576,7 +576,7 @@ impl SourceMap {
         }
     }
 
-    /// Return the source snippet as `String` before the given `Span`
+    /// Returns the source snippet as `String` before the given `Span`.
     pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
         self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string())
     }
@@ -1123,7 +1123,7 @@ mod tests {
 
     /// Given a string like " ~~~~~~~~~~~~ ", produces a span
-    /// converting that range. The idea is that the string has the same
+    /// covering that range. The idea is that the string has the same
-    /// length as the input, and we uncover the byte positions.  Note
+    /// length as the input, and we uncover the byte positions. Note
     /// that this can span lines and so on.
     fn span_from_selection(input: &str, selection: &str) -> Span {
         assert_eq!(input.len(), selection.len());
@@ -1132,7 +1132,7 @@ mod tests {
         Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION)
     }
 
-    /// Test span_to_snippet and span_to_lines for a span converting 3
+    /// Tests `span_to_snippet` and `span_to_lines` for a span covering 3
     /// lines in the middle of a file.
     #[test]
     fn span_to_snippet_and_lines_spanning_multiple_lines() {
@@ -1175,7 +1175,7 @@ mod tests {
         assert_eq!(sstr, "blork.rs:2:1: 2:12");
     }
 
-    /// Test failing to merge two spans on different lines
+    /// Tests failing to merge two spans on different lines.
     #[test]
     fn span_merging_fail() {
         let sm = SourceMap::new(FilePathMapping::empty());
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index b6e4d4cd976..c4f2cffb097 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -5,6 +5,7 @@
 //! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
 //!
 //! ## Ownership
+//!
 //! `TokenStreams` are persistent data structures constructed as ropes with reference
 //! counted-children. In general, this means that calling an operation on a `TokenStream`
 //! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
@@ -59,7 +60,7 @@ impl TokenTree {
         macro_parser::parse(cx.parse_sess(), tts, mtch, Some(directory), true)
     }
 
-    /// Check if this TokenTree is equal to the other, regardless of span information.
+    /// Checks if this `TokenTree` is equal to the other, regardless of span information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
             (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
@@ -89,7 +90,7 @@ impl TokenTree {
         }
     }
 
-    /// Retrieve the TokenTree's span.
+    /// Retrieves the `TokenTree`'s span.
     pub fn span(&self) -> Span {
         match *self {
             TokenTree::Token(sp, _) => sp,
diff --git a/src/libsyntax/util/lev_distance.rs b/src/libsyntax/util/lev_distance.rs
index c989fc7a5b8..2f150d22159 100644
--- a/src/libsyntax/util/lev_distance.rs
+++ b/src/libsyntax/util/lev_distance.rs
@@ -1,7 +1,7 @@
 use std::cmp;
 use crate::symbol::Symbol;
 
-/// Find the Levenshtein distance between two strings
+/// Finds the Levenshtein distance between two strings.
 pub fn lev_distance(a: &str, b: &str) -> usize {
     // cases which don't require further computation
     if a.is_empty() {
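A sketch of the expected behavior (editor's illustration, not from this patch; it assumes `lev_distance` from this module is in scope): turning "kitten" into "sitting" takes three edits.

```rust
fn main() {
    // Three edits: k -> s, e -> i, and an inserted g.
    assert_eq!(lev_distance("kitten", "sitting"), 3);
}
```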
@@ -32,7 +32,7 @@ pub fn lev_distance(a: &str, b: &str) -> usize {
     dcol[t_last + 1]
 }
 
-/// Find the best match for a given word in the given iterator
+/// Finds the best match for a given word in the given iterator.
 ///
 /// As a loose rule to avoid the obviously incorrect suggestions, it takes
 /// an optional limit for the maximum allowable edit distance, which defaults
diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs
index 61729a08060..5f15ede7b0b 100644
--- a/src/libsyntax/util/parser.rs
+++ b/src/libsyntax/util/parser.rs
@@ -70,7 +70,7 @@ pub enum Fixity {
 }
 
 impl AssocOp {
-    /// Create a new AssocOP from a token
+    /// Creates a new `AssocOp` from a token.
     pub fn from_token(t: &Token) -> Option<AssocOp> {
         use AssocOp::*;
         match *t {
@@ -105,7 +105,7 @@ impl AssocOp {
         }
     }
 
-    /// Create a new AssocOp from ast::BinOpKind.
+    /// Creates a new `AssocOp` from `ast::BinOpKind`.
     pub fn from_ast_binop(op: BinOpKind) -> Self {
         use AssocOp::*;
         match op {
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index bcf1da66c04..733c4f83e37 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -66,7 +66,7 @@ pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
     })
 }
 
-/// Convert a vector of strings to a vector of Ident's
+/// Converts a vector of strings to a vector of `Ident`s.
 pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<Ident> {
     ids.iter().map(|u| Ident::from_str(*u)).collect()
 }
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index bb3b0ea7359..a002394c710 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -6,7 +6,7 @@
 //! Note: it is an important invariant that the default visitor walks the body
 //! of a function in "execution order" (more concretely, reverse post-order
 //! with respect to the CFG implied by the AST), meaning that if AST node A may
-//! execute before AST node B, then A is visited first.  The borrow checker in
+//! execute before AST node B, then A is visited first. The borrow checker in
 //! particular relies on this property.
 //!
 //! Note: walking an AST before macro expansion is probably a bad idea. For
@@ -32,12 +32,12 @@ pub enum FnKind<'a> {
 }
 
 /// Each method of the Visitor trait is a hook to be potentially
-/// overridden.  Each method's default implementation recursively visits
+/// overridden. Each method's default implementation recursively visits
 /// the substructure of the input via the corresponding `walk` method;
 /// e.g., the `visit_mod` method by default calls `visit::walk_mod`.
 ///
 /// If you want to ensure that your code handles every variant
-/// explicitly, you need to override each method.  (And you also need
+/// explicitly, you need to override each method. (And you also need
 /// to monitor future changes to `Visitor` in case a new method with a
 /// new default implementation gets introduced.)
 pub trait Visitor<'ast>: Sized {