From c3e182cf43aea2c010a1915eb37293a458df2228 Mon Sep 17 00:00:00 2001
From: Alexander Regueiro
Date: Fri, 8 Feb 2019 14:53:55 +0100
Subject: rustc: doc comments

---
 src/libsyntax/parse/lexer/comments.rs |   6 +-
 src/libsyntax/parse/lexer/mod.rs      |  12 +-
 src/libsyntax/parse/mod.rs            |  43 +++--
 src/libsyntax/parse/parser.rs         | 319 ++++++++++++++++++----------------
 src/libsyntax/parse/token.rs          |  20 +--
 5 files changed, 206 insertions(+), 194 deletions(-)

(limited to 'src/libsyntax/parse')

diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 4632d814d5c..74fff3324ea 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -197,9 +197,9 @@ fn read_line_comments(rdr: &mut StringReader<'_>,
     }
 }
 
-/// Returns None if the first col chars of s contain a non-whitespace char.
-/// Otherwise returns Some(k) where k is first char offset after that leading
-/// whitespace. Note k may be outside bounds of s.
+/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
+/// Otherwise returns `Some(k)` where `k` is the first char offset after that leading
+/// whitespace. Note that `k` may be outside bounds of `s`.
 fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
     let mut idx = 0;
     for (i, ch) in s.char_indices().take(col.to_usize()) {
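The contract documented above is easy to check in isolation. Below is a minimal, self-contained sketch of the described behavior — an illustration that assumes a plain `usize` column in place of the compiler's `CharPos`, not the compiler's actual code:

    /// Returns `None` if the first `col` chars of `s` contain a non-whitespace
    /// char; otherwise returns `Some(k)`, where `k` is the byte offset just past
    /// the scanned whitespace (possibly `s.len()`, i.e. not a valid char index).
    fn all_whitespace(s: &str, col: usize) -> Option<usize> {
        let mut idx = 0;
        for (i, ch) in s.char_indices().take(col) {
            if !ch.is_whitespace() {
                return None;
            }
            idx = i + ch.len_utf8();
        }
        Some(idx)
    }

    fn main() {
        assert_eq!(all_whitespace("   abc", 3), Some(3)); // three leading spaces
        assert_eq!(all_whitespace("  abc", 3), None);     // 'a' within the first 3 chars
        assert_eq!(all_whitespace("  ", 4), Some(2));     // k == s.len(): outside bounds
    }
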
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index d3fc1c03634..9168d4b61c1 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -112,7 +112,7 @@ impl<'a> StringReader<'a> {
         self.unwrap_or_abort(res)
     }
 
-    /// Return the next token. EFFECT: advances the string_reader.
+    /// Returns the next token. EFFECT: advances the string_reader.
     pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
         assert!(self.fatal_errs.is_empty());
         let ret_val = TokenAndSpan {
@@ -425,7 +425,7 @@ impl<'a> StringReader<'a> {
         self.with_str_from_to(start, self.pos, f)
     }
 
-    /// Create a Name from a given offset to the current offset, each
+    /// Creates a Name from a given offset to the current offset, each
     /// adjusted 1 towards each other (assumes that on either side there is a
     /// single-byte delimiter).
     fn name_from(&self, start: BytePos) -> ast::Name {
@@ -670,7 +670,7 @@ impl<'a> StringReader<'a> {
     }
 
     /// If there is whitespace, shebang, or a comment, scan it. Otherwise,
-    /// return None.
+    /// return `None`.
     fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
         match self.ch.unwrap_or('\0') {
             // # to handle shebang at start of file -- this is the entry point
@@ -920,7 +920,7 @@ impl<'a> StringReader<'a> {
     /// in a byte, (non-raw) byte string, char, or (non-raw) string literal.
     /// `start` is the position of `first_source_char`, which is already consumed.
     ///
-    /// Returns true if there was a valid char/byte, false otherwise.
+    /// Returns `true` if there was a valid char/byte.
     fn scan_char_or_byte(&mut self,
                          start: BytePos,
                          first_source_char: char,
@@ -1152,7 +1152,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    /// Check that a base is valid for a floating literal, emitting a nice
+    /// Checks that a base is valid for a floating literal, emitting a nice
     /// error if it isn't.
     fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
         match base {
@@ -1185,7 +1185,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    /// Return the next token from the string, advances the input past that
+    /// Returns the next token from the string, advances the input past that
     /// token, and updates the interner
    fn next_token_inner(&mut self) -> Result<token::Token, ()> {
         let c = self.ch;
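`try_next_token` and `next_token_inner` share one cursor-style contract: each successful call yields a token and advances the reader. A toy model of that contract (the names mirror the patch, but the type below is a stand-in that replays a fixed list, not the real `StringReader`):

    #[derive(Clone, Debug)]
    enum Token { Ident(String), Plus, Eof }

    struct StringReader { tokens: Vec<Token>, pos: usize }

    impl StringReader {
        /// Returns the next token and advances the reader, mirroring the
        /// "EFFECT: advances the string_reader" note above.
        fn try_next_token(&mut self) -> Result<Token, ()> {
            let tok = self.tokens.get(self.pos).cloned().unwrap_or(Token::Eof);
            self.pos += 1;
            Ok(tok)
        }
    }

    fn main() {
        let mut reader = StringReader {
            tokens: vec![Token::Ident("a".into()), Token::Plus, Token::Ident("b".into())],
            pos: 0,
        };
        loop {
            match reader.try_next_token() {
                Ok(Token::Eof) | Err(()) => break,
                Ok(tok) => println!("{:?}", tok),
            }
        }
    }
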
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 317d6933207..69940ae621c 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -1,4 +1,4 @@
-//! The main parser interface
+//! The main parser interface.
 
 use crate::ast::{self, CrateConfig, NodeId};
 use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
@@ -38,12 +38,11 @@ pub struct ParseSess {
     pub unstable_features: UnstableFeatures,
     pub config: CrateConfig,
     pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
-    /// Places where raw identifiers were used. This is used for feature gating
-    /// raw identifiers
+    /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
    pub raw_identifier_spans: Lock<Vec<Span>>,
-    /// The registered diagnostics codes
+    /// The registered diagnostics codes.
    crate registered_diagnostics: Lock<ErrorMap>,
-    /// Used to determine and report recursive mod inclusions
+    /// Used to determine and report recursive module inclusions.
    included_mod_stack: Lock<Vec<PathBuf>>,
    source_map: Lrc<SourceMap>,
    pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
@@ -146,12 +145,12 @@ pub fn parse_stream_from_source_str(
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
-/// Create a new parser from a source string
+/// Creates a new parser from a source string.
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                   -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
 }
 
-/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
 /// token stream.
 pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
     -> Result<Parser<'_>, Vec<Diagnostic>>
@@ -162,13 +161,13 @@ pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source
     Ok(parser)
 }
 
-/// Create a new parser, handling errors as appropriate
+/// Creates a new parser, handling errors as appropriate
 /// if the file doesn't exist
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
     source_file_to_parser(sess, file_to_source_file(sess, path, None))
 }
 
-/// Create a new parser, returning buffered diagnostics if the file doesn't
+/// Creates a new parser, returning buffered diagnostics if the file doesn't
 /// exist or from lexing the initial token stream.
 pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path)
     -> Result<Parser<'a>, Vec<Diagnostic>> {
@@ -239,7 +238,7 @@ fn try_file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a session and a path and an optional span (for error reporting),
-/// add the path to the session's source_map and return the new source_file.
+/// add the path to the session's `source_map` and return the new `source_file`.
 fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                    -> Lrc<SourceFile> {
     match try_file_to_source_file(sess, path, spanopt) {
@@ -251,7 +250,7 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     }
 }
 
-/// Given a source_file, produce a sequence of token-trees
+/// Given a `source_file`, produces a sequence of token trees.
 pub fn source_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
@@ -260,7 +259,7 @@ pub fn source_file_to_stream(
     panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
 }
 
-/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
 /// parsing the token stream.
 pub fn maybe_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
@@ -295,12 +294,12 @@ pub fn maybe_file_to_stream(
     }
 }
 
-/// Given stream and the `ParseSess`, produce a parser
+/// Given a stream and the `ParseSess`, produces a parser.
 pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
     Parser::new(sess, stream, None, true, false)
 }
 
-/// Parse a string representing a character literal into its final form.
+/// Parses a string representing a character literal into its final form.
 /// Rather than just accepting/rejecting a given literal, unescapes it as
 /// well. Can take any slice prefixed by a character escape. Returns the
 /// character and the number of characters consumed.
 fn char_lit(lit: &str, diag: Option<(Span, &Handler)>) -> (char, isize) {
@@ -359,15 +358,14 @@ fn char_lit(lit: &str, diag: Option<(Span, &Handler)>) -> (char, isize) {
     }
 }
 
-/// Parse a string representing a string literal into its final form. Does
-/// unescaping.
+/// Parses a string representing a string literal into its final form. Does unescaping.
 pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
     debug!("str_lit: given {}", lit.escape_default());
     let mut res = String::with_capacity(lit.len());
 
     let error = |i| format!("lexer should have rejected {} at {}", lit, i);
 
-    /// Eat everything up to a non-whitespace
+    /// Eats everything up to a non-whitespace character.
     fn eat<'a>(it: &mut iter::Peekable<str::CharIndices<'a>>) {
         loop {
             match it.peek().map(|x| x.1) {
@@ -428,7 +426,7 @@ pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
     res
 }
 
-/// Parse a string representing a raw string literal into its final form. The
+/// Parses a string representing a raw string literal into its final form. The
 /// only operation this does is convert embedded CRLF into a single LF.
 fn raw_str_lit(lit: &str) -> String {
     debug!("raw_str_lit: given {}", lit.escape_default());
@@ -554,7 +552,7 @@ fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
     filtered_float_lit(Symbol::intern(s), suffix, diag)
 }
 
-/// Parse a string representing a byte literal into its final form. Similar to `char_lit`
+/// Parses a string representing a byte literal into its final form. Similar to `char_lit`.
 fn byte_lit(lit: &str) -> (u8, usize) {
     let err = |i| format!("lexer accepted invalid byte literal {} step {}", lit, i);
@@ -591,7 +589,7 @@ fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> {
     let error = |i| panic!("lexer should have rejected {} at {}", lit, i);
 
-    /// Eat everything up to a non-whitespace
+    /// Eats everything up to a non-whitespace character.
     fn eat<I: Iterator<Item = (usize, u8)>>(it: &mut iter::Peekable<I>) {
         loop {
             match it.peek().map(|x| x.1) {
@@ -758,10 +756,11 @@ fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
     })
 }
 
-/// `SeqSep` : a sequence separator (token)
-/// and whether a trailing separator is allowed.
+/// A sequence separator.
 pub struct SeqSep {
+    /// The separator token.
     pub sep: Option<token::Token>,
+    /// `true` if a trailing separator is allowed.
     pub trailing_sep_allowed: bool,
 }
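`SeqSep` drives the `parse_seq_to_*` helpers changed later in this patch: the separator token steers the loop, and `trailing_sep_allowed` decides whether `a, b,` is accepted before the closing delimiter. A self-contained sketch of that loop over toy tokens — illustrative only, since the real helpers are generic over a parsing closure `f`:

    #[derive(Clone, Debug, PartialEq)]
    enum Token { Ident(&'static str), Comma, CloseParen }

    struct SeqSep { sep: Option<Token>, trailing_sep_allowed: bool }

    /// Parses `Ident (sep Ident)* sep?` up to (and including) `ket`.
    fn parse_seq_to_end(toks: &[Token], sep: &SeqSep, ket: &Token)
                        -> Result<Vec<&'static str>, String> {
        let mut out = Vec::new();
        let mut i = 0;
        loop {
            match toks.get(i) {
                Some(t) if t == ket => return Ok(out), // closing delimiter reached
                Some(Token::Ident(name)) => {
                    out.push(*name);
                    i += 1;
                    // After an element, expect the separator or the closing delimiter.
                    match (toks.get(i), &sep.sep) {
                        (Some(t), Some(s)) if t == s => {
                            i += 1;
                            // A separator directly before `ket` is a trailing separator.
                            if toks.get(i) == Some(ket) && !sep.trailing_sep_allowed {
                                return Err("trailing separator not allowed".into());
                            }
                        }
                        (Some(t), _) if t == ket => {}
                        other => return Err(format!("unexpected token: {:?}", other)),
                    }
                }
                other => return Err(format!("unexpected token: {:?}", other)),
            }
        }
    }

    fn main() {
        use Token::*;
        let seq = [Ident("a"), Comma, Ident("b"), Comma, CloseParen];
        let sep = SeqSep { sep: Some(Comma), trailing_sep_allowed: true };
        assert_eq!(parse_seq_to_end(&seq, &sep, &CloseParen).unwrap(), vec!["a", "b"]);
    }
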
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 69d6407d506..67154305735 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -75,7 +75,7 @@ bitflags::bitflags! {
 
 type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);
 
-/// How to parse a path.
+/// Specifies how to parse a path.
 #[derive(Copy, Clone, PartialEq)]
 pub enum PathStyle {
     /// In some contexts, notably in expressions, paths with generic arguments are ambiguous
@@ -111,7 +111,7 @@ enum BlockMode {
     Ignore,
 }
 
-/// Possibly accept an `token::Interpolated` expression (a pre-parsed expression
+/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
 /// dropped into the token stream, which happens while parsing the result of
 /// macro expansion). Placement of these is not as complex as I feared it would
 /// be. The important thing is to make sure that lookahead doesn't balk at
@@ -420,11 +420,11 @@ impl TokenType {
     }
 }
 
-/// Returns true if `IDENT t` can start a type - `IDENT::a::b`, `IDENT`,
+/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
 /// `IDENT<<u8 as Trait>::AssocTy>`.
 ///
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
-/// that IDENT is not the ident of a fn trait
+/// that `IDENT` is not the ident of a fn trait.
 fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
     t == &token::ModSep || t == &token::Lt ||
     t == &token::BinOp(token::Shl)
@@ -525,7 +525,7 @@ impl From<Option<ThinVec<Attribute>>> for LhsExpr {
     }
 }
 
-/// Create a placeholder argument.
+/// Creates a placeholder argument.
 fn dummy_arg(span: Span) -> Arg {
     let ident = Ident::new(keywords::Invalid.name(), span);
     let pat = P(Pat {
@@ -614,7 +614,7 @@ impl<'a> Parser<'a> {
         next
     }
 
-    /// Convert the current token to a string using self's reader
+    /// Converts the current token to a string using `self`'s reader.
     pub fn this_token_to_string(&self) -> String {
         pprust::token_to_string(&self.token)
     }
@@ -649,8 +649,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume the token t. Signal an error if
-    /// the next token is not t.
+    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
     pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
@@ -867,7 +866,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// returns the span of expr, if it was not interpolated or the span of the interpolated token
+    /// Returns the span of `expr`, if it was not interpolated, or the span of the interpolated token.
     fn interpolated_or_expr_span(&self,
                                  expr: PResult<'a, P<Expr>>)
                                  -> PResult<'a, (Span, P<Expr>)> {
@@ -941,7 +940,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Check if the next token is `tok`, and return `true` if so.
+    /// Checks if the next token is `tok`, and returns `true` if so.
     ///
     /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
     /// encountered.
@@ -951,8 +950,7 @@ crate fn check(&mut self, tok: &token::Token) -> bool {
         let is_present = self.token == *tok;
         is_present
     }
 
-    /// Consume token 'tok' if it exists. Returns true if the given
-    /// token was present, false otherwise.
+    /// Consumes a token `tok` if it exists. Returns whether the given token was present.
     pub fn eat(&mut self, tok: &token::Token) -> bool {
         let is_present = self.check(tok);
         if is_present { self.bump() }
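`check` and `eat` form the parser's basic lookahead pair: `check` tests the current token without consuming it (also recording it as "expected" for diagnostics), while `eat` consumes it only when it matches. The split is easy to model with a toy cursor — illustrative only, as the real methods also feed `expected_tokens`:

    struct Cursor { tokens: Vec<char>, pos: usize }

    impl Cursor {
        /// Non-consuming: reports whether the next token is `tok`.
        fn check(&self, tok: char) -> bool {
            self.tokens.get(self.pos) == Some(&tok)
        }

        /// Consuming: advances past `tok` if present; returns whether it was there.
        fn eat(&mut self, tok: char) -> bool {
            let is_present = self.check(tok);
            if is_present {
                self.pos += 1; // the `bump()` step
            }
            is_present
        }
    }

    fn main() {
        let mut c = Cursor { tokens: vec!['(', ')'], pos: 0 };
        assert!(c.check('(')); // look, don't consume
        assert!(c.check('(')); // still there
        assert!(c.eat('('));   // now consume it
        assert!(c.eat(')'));
        assert!(!c.eat(')'));  // nothing left
    }
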
@@ -964,8 +962,8 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw)
     }
 
-    /// If the next token is the given keyword, eat it and return
-    /// true. Otherwise, return false.
+    /// If the next token is the given keyword, eats it and returns
+    /// `true`. Otherwise, returns `false`.
     pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
         if self.check_keyword(kw) {
             self.bump();
@@ -984,9 +982,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// If the given word is not a keyword, signal an error.
-    /// If the next token is not the given word, signal an error.
-    /// Otherwise, eat it.
+    /// If the given word is not a keyword, signals an error.
+    /// If the next token is not the given word, signals an error.
+    /// Otherwise, eats it.
     fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
         if !self.eat_keyword(kw) {
             self.unexpected()
@@ -1031,11 +1029,11 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume a `+`. if `+=` is seen, replace it with a `=`
-    /// and continue. If a `+` is not seen, return false.
+    /// Expects and consumes a `+`. If `+=` is seen, replaces it with a `=`
+    /// and continues. If a `+` is not seen, returns `false`.
     ///
-    /// This is using when token splitting += into +.
-    /// See issue 47856 for an example of when this may occur.
+    /// This is used when token-splitting `+=` into `+`.
+    /// See issue #47856 for an example of when this may occur.
     fn eat_plus(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
         match self.token {
@@ -1054,7 +1052,7 @@
     }
 
     /// Checks to see if the next token is either `+` or `+=`.
-    /// Otherwise returns false.
+    /// Otherwise returns `false`.
     fn check_plus(&mut self) -> bool {
         if self.token.is_like_plus() {
             true
@@ -1065,8 +1063,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume an `&`. If `&&` is seen, replace it with a single
-    /// `&` and continue. If an `&` is not seen, signal an error.
+    /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single
+    /// `&` and continues. If an `&` is not seen, signals an error.
     fn expect_and(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
         match self.token {
@@ -1082,8 +1080,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume an `|`. If `||` is seen, replace it with a single
-    /// `|` and continue. If an `|` is not seen, signal an error.
+    /// Expects and consumes an `|`. If `||` is seen, replaces it with a single
+    /// `|` and continues. If an `|` is not seen, signals an error.
     fn expect_or(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
         match self.token {
@@ -1115,9 +1113,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Attempt to consume a `<`. If `<<` is seen, replace it with a single
-    /// `<` and continue. If `<-` is seen, replace it with a single `<`
-    /// and continue. If a `<` is not seen, return false.
+    /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
+    /// `<` and continues. If `<-` is seen, replaces it with a single `<`
+    /// and continues. If a `<` is not seen, returns `false`.
     ///
     /// This is meant to be used when parsing generics on a path to get the
     /// starting token.
     fn eat_lt(&mut self) -> bool {
@@ -1159,9 +1157,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect and consume a GT. if a >> is seen, replace it
-    /// with a single > and continue. If a GT is not seen,
-    /// signal an error.
+    /// Expects and consumes a single `>` token. If a `>>` is seen, replaces it
+    /// with a single `>` and continues. If a `>` is not seen, signals an error.
     fn expect_gt(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::Gt));
         let ate = match self.token {
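`eat_plus`, `eat_lt`, and `expect_gt` all describe the same trick: when the lexer produced a compound token but the grammar wants only its first character, the parser splits the token and keeps the remainder. A toy illustration of splitting `>>` while closing nested generics (simplified tokens; the real parser rewrites its current-token state rather than a vector):

    #[derive(Clone, Debug)]
    enum Token { Gt, Shr /* `>>` */ }

    /// Consumes a single `>`. If the current token is `>>` (`Shr`), eats just
    /// one `>` by replacing the token with `Gt`, mirroring `expect_gt`.
    fn expect_gt(tokens: &mut Vec<Token>) -> Result<(), String> {
        match tokens.first().cloned() {
            Some(Token::Gt) => { tokens.remove(0); Ok(()) }
            Some(Token::Shr) => { tokens[0] = Token::Gt; Ok(()) } // split `>>` into `>` `>`
            other => Err(format!("expected `>`, found {:?}", other)),
        }
    }

    fn main() {
        // Closing `Vec<Vec<u8>>` needs two `>`s, but the lexer emitted one `>>`.
        let mut toks = vec![Token::Shr];
        expect_gt(&mut toks).unwrap(); // first `>`: Shr becomes Gt
        expect_gt(&mut toks).unwrap(); // second `>`: Gt consumed
        assert!(toks.is_empty());
    }
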
@@ -1196,7 +1193,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Eat and discard tokens until one of `kets` is encountered. Respects token trees,
+    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
     fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
         let handler = self.diagnostic();
@@ -1209,8 +1206,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse a sequence, including the closing delimiter. The function
-    /// f must consume tokens until reaching the next separator or
+    /// Parses a sequence, including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     pub fn parse_seq_to_end(&mut self,
                             ket: &token::Token,
@@ -1226,8 +1223,8 @@ impl<'a> Parser<'a> {
         Ok(val)
     }
 
-    /// Parse a sequence, not including the closing delimiter. The function
-    /// f must consume tokens until reaching the next separator or
+    /// Parses a sequence, not including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     pub fn parse_seq_to_before_end(
         &mut self,
@@ -1311,8 +1308,8 @@ impl<'a> Parser<'a> {
         Ok((v, recovered))
     }
 
-    /// Parse a sequence, including the closing delimiter. The function
-    /// f must consume tokens until reaching the next separator or
+    /// Parses a sequence, including the closing delimiter. The function
+    /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     fn parse_unspanned_seq(
         &mut self,
@@ -1429,15 +1426,14 @@ impl<'a> Parser<'a> {
         &self.sess.span_diagnostic
     }
 
-    /// Is the current token one of the keywords that signals a bare function
-    /// type?
+    /// Is the current token one of the keywords that signals a bare function type?
     fn token_is_bare_fn_keyword(&mut self) -> bool {
         self.check_keyword(keywords::Fn) ||
             self.check_keyword(keywords::Unsafe) ||
             self.check_keyword(keywords::Extern)
     }
 
-    /// parse a `TyKind::BareFn` type:
+    /// Parses a `TyKind::BareFn` type.
     fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
         /*
@@ -1474,7 +1470,7 @@ impl<'a> Parser<'a> {
         })))
     }
 
-    /// Parse asyncness: `async` or nothing
+    /// Parses asyncness: `async` or nothing.
     fn parse_asyncness(&mut self) -> IsAsync {
         if self.eat_keyword(keywords::Async) {
             IsAsync::Async {
@@ -1486,7 +1482,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse unsafety: `unsafe` or nothing.
+    /// Parses unsafety: `unsafe` or nothing.
     fn parse_unsafety(&mut self) -> Unsafety {
         if self.eat_keyword(keywords::Unsafe) {
             Unsafety::Unsafe
@@ -1495,7 +1491,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse the items in a trait declaration
+    /// Parses the items in a trait declaration.
     pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
         maybe_whole!(self, NtTraitItem, |x| x);
         let attrs = self.parse_outer_attributes()?;
@@ -1612,7 +1608,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Parse optional return type [ -> TY ] in function decl
+    /// Parses an optional return type `[ -> TY ]` in a function declaration.
     fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
         if self.eat(&token::RArrow) {
             Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?))
         } else {
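`parse_asyncness`, `parse_unsafety`, and `parse_ret_ty` share one shape: try to eat a marker token and fall back to a default when it is absent. A minimal stand-alone sketch of that shape (toy token cursor, not the compiler's `Parser`):

    #[derive(Debug, PartialEq)]
    enum Unsafety { Unsafe, Normal }

    struct Cursor<'a> { tokens: &'a [&'a str], pos: usize }

    impl<'a> Cursor<'a> {
        /// Consumes `kw` if it is the next token, returning whether it was present.
        fn eat_keyword(&mut self, kw: &str) -> bool {
            if self.tokens.get(self.pos) == Some(&kw) {
                self.pos += 1;
                true
            } else {
                false
            }
        }

        /// Parses unsafety: `unsafe` or nothing.
        fn parse_unsafety(&mut self) -> Unsafety {
            if self.eat_keyword("unsafe") { Unsafety::Unsafe } else { Unsafety::Normal }
        }
    }

    fn main() {
        let mut c = Cursor { tokens: &["unsafe", "fn"], pos: 0 };
        assert_eq!(c.parse_unsafety(), Unsafety::Unsafe);
        let mut c2 = Cursor { tokens: &["fn"], pos: 0 };
        assert_eq!(c2.parse_unsafety(), Unsafety::Normal);
    }
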
@@ -1621,12 +1617,13 @@ impl<'a> Parser<'a> {
         }
     }
 
-    // Parse a type
+    /// Parses a type.
     pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
         self.parse_ty_common(true, true)
     }
 
-    /// Parse a type in restricted contexts where `+` is not permitted.
+    /// Parses a type in restricted contexts where `+` is not permitted.
+    ///
     /// Example 1: `&'a TYPE`
     ///     `+` is prohibited to maintain operator priority (P(+) < P(&)).
     /// Example 2: `value1 as TYPE + value2`
     ///     `+` is prohibited to avoid interactions with expression grammar.
@@ -1929,7 +1926,8 @@ impl<'a> Parser<'a> {
         self.look_ahead(offset + 1, |t| t == &token::Colon)
     }
 
-    /// Skip unexpected attributes and doc comments in this position and emit an appropriate error.
+    /// Skips unexpected attributes and doc comments in this position and emits an appropriate
+    /// error.
     fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
         if let token::DocComment(_) = self.token {
             let mut err = self.diagnostic().struct_span_err(
@@ -1958,8 +1956,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// This version of parse arg doesn't necessarily require
-    /// identifier names.
+    /// This version of `parse_arg` doesn't necessarily require identifier names.
     fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> {
         maybe_whole!(self, NtArg, |x| x);
@@ -2067,12 +2064,12 @@ impl<'a> Parser<'a> {
         Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID })
     }
 
-    /// Parse a single function argument
+    /// Parses a single function argument.
     crate fn parse_arg(&mut self) -> PResult<'a, Arg> {
         self.parse_arg_general(true, false)
     }
 
-    /// Parse an argument in a lambda header e.g., |arg, arg|
+    /// Parses an argument in a lambda header (e.g., `|arg, arg|`).
     fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
         let pat = self.parse_pat(Some("argument name"))?;
         let t = if self.eat(&token::Colon) {
@@ -2099,7 +2096,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Matches token_lit = LIT_INTEGER | ...
+    /// Matches `token_lit = LIT_INTEGER | ...`.
     fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
         let out = match self.token {
             token::Interpolated(ref nt) => match nt.0 {
@@ -2165,7 +2162,7 @@ impl<'a> Parser<'a> {
         Ok(out)
     }
 
-    /// Matches lit = true | false | token_lit
+    /// Matches `lit = true | false | token_lit`.
     crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
         let lo = self.span;
         let lit = if self.eat_keyword(keywords::True) {
@@ -2179,7 +2176,7 @@ impl<'a> Parser<'a> {
         Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) })
     }
 
-    /// matches '-' lit | lit (cf. ast_validation::AstValidator::check_expr_within_pat)
+    /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
     crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
@@ -2221,7 +2218,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parses qualified path.
+    /// Parses a qualified path.
     /// Assumes that the leading `<` has been parsed already.
     ///
     /// `qualified_path = <type [as trait_ref]>::path`
@@ -2297,8 +2294,9 @@ impl<'a> Parser<'a> {
         Ok(ast::Path { segments, span: lo.to(self.prev_span) })
     }
 
-    /// Like `parse_path`, but also supports parsing `Word` meta items into paths for back-compat.
-    /// This is used when parsing derive macro paths in `#[derive]` attributes.
+    /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
+    /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
+    /// attributes.
     pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
         let meta_ident = match self.token {
             token::Interpolated(ref nt) => match nt.0 {
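For readers unfamiliar with the `qualified_path = <type [as trait_ref]>::path` production above, this is ordinary surface Rust — the parser hands off to the qualified-path code after seeing the leading `<`. A small illustrative program (nothing here is compiler-internal):

    use std::convert::TryFrom;

    trait Speak { fn noise() -> &'static str; }
    struct Dog;
    impl Speak for Dog { fn noise() -> &'static str { "woof" } }

    fn main() {
        // `<type>::path` — qualified path without a trait qualifier.
        let n = <u32>::max(1, 7);
        assert_eq!(n, 7);

        // `<type as trait_ref>::path` — fully qualified forms.
        assert_eq!(<Dog as Speak>::noise(), "woof");
        let m = <u64 as TryFrom<u32>>::try_from(5u32).unwrap();
        assert_eq!(m, 5);
    }
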
@@ -2423,7 +2421,7 @@ impl<'a> Parser<'a> {
         self.token.is_lifetime()
     }
 
-    /// Parse single lifetime 'a or panic.
+    /// Parses a single lifetime `'a` or panics.
     crate fn expect_lifetime(&mut self) -> Lifetime {
         if let Some(ident) = self.token.lifetime() {
             let span = self.span;
@@ -2444,7 +2442,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse mutability (`mut` or nothing).
+    /// Parses mutability (`mut` or nothing).
     fn parse_mutability(&mut self) -> Mutability {
         if self.eat_keyword(keywords::Mut) {
             Mutability::Mutable
@@ -2575,12 +2573,10 @@ impl<'a> Parser<'a> {
     }
 
     /// At the bottom (top?) of the precedence hierarchy,
-    /// parse things like parenthesized exprs,
-    /// macros, return, etc.
+    /// parses things like parenthesized exprs, macros, `return`, etc.
     ///
-    /// N.B., this does not parse outer attributes,
-    /// and is private because it only works
-    /// correctly if called from parse_dot_or_call_expr().
+    /// N.B., this does not parse outer attributes, and is private because it only works
+    /// correctly if called from `parse_dot_or_call_expr()`.
     fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
@@ -2965,7 +2961,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse a block or unsafe block
+    /// Parses a block or unsafe block.
     fn parse_block_expr(&mut self, opt_label: Option