diff options
| author | Alexander Regueiro <alexreg@me.com> | 2019-02-08 14:53:55 +0100 |
|---|---|---|
| committer | Alexander Regueiro <alexreg@me.com> | 2019-02-10 23:42:32 +0000 |
| commit | c3e182cf43aea2c010a1915eb37293a458df2228 (patch) | |
| tree | 225aa2dfceff56d10c0b31f6966fbf7ec5da8180 /src/libsyntax/ext | |
| parent | 0b7af2668a80fb2fa720a06ca44aff4dd1e9de38 (diff) | |
| download | rust-c3e182cf43aea2c010a1915eb37293a458df2228.tar.gz rust-c3e182cf43aea2c010a1915eb37293a458df2228.zip | |
rustc: doc comments
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/base.rs | 34 | ||||
| -rw-r--r-- | src/libsyntax/ext/build.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 18 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 8 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/quoted.rs | 18 |
6 files changed, 41 insertions, 41 deletions
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 465b53184dc..fcb349205e3 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -327,34 +327,34 @@ macro_rules! make_stmts_default { /// The result of a macro expansion. The return values of the various /// methods are spliced into the AST at the callsite of the macro. pub trait MacResult { - /// Create an expression. + /// Creates an expression. fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> { None } - /// Create zero or more items. + /// Creates zero or more items. fn make_items(self: Box<Self>) -> Option<SmallVec<[P<ast::Item>; 1]>> { None } - /// Create zero or more impl items. + /// Creates zero or more impl items. fn make_impl_items(self: Box<Self>) -> Option<SmallVec<[ast::ImplItem; 1]>> { None } - /// Create zero or more trait items. + /// Creates zero or more trait items. fn make_trait_items(self: Box<Self>) -> Option<SmallVec<[ast::TraitItem; 1]>> { None } - /// Create zero or more items in an `extern {}` block + /// Creates zero or more items in an `extern {}` block fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> { None } - /// Create a pattern. + /// Creates a pattern. fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> { None } - /// Create zero or more statements. + /// Creates zero or more statements. /// /// By default this attempts to create an expression statement, /// returning None if that fails. @@ -461,7 +461,7 @@ pub struct DummyResult { } impl DummyResult { - /// Create a default MacResult that can be anything. + /// Creates a default MacResult that can be anything. /// /// Use this as a return value after hitting any errors and /// calling `span_err`. @@ -474,7 +474,7 @@ impl DummyResult { Box::new(DummyResult { expr_only: false, is_error: false, span }) } - /// Create a default MacResult that can only be an expression. + /// Creates a default MacResult that can only be an expression. 
/// /// Use this for macros that must expand to an expression, so even /// if an error is encountered internally, the user will receive @@ -677,7 +677,7 @@ pub enum SyntaxExtension { } impl SyntaxExtension { - /// Return which kind of macro calls this syntax extension. + /// Returns which kind of macro calls this syntax extension. pub fn kind(&self) -> MacroKind { match *self { SyntaxExtension::DeclMacro { .. } | @@ -835,8 +835,8 @@ impl<'a> ExtCtxt<'a> { expand::MacroExpander::new(self, false) } - /// Returns a `Folder` that deeply expands all macros and assigns all node ids in an AST node. - /// Once node ids are assigned, the node may not be expanded, removed, or otherwise modified. + /// Returns a `Folder` that deeply expands all macros and assigns all `NodeId`s in an AST node. + /// Once `NodeId`s are assigned, the node may not be expanded, removed, or otherwise modified. pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> { expand::MacroExpander::new(self, true) } @@ -976,9 +976,9 @@ impl<'a> ExtCtxt<'a> { } } -/// Extract a string literal from the macro expanded version of `expr`, +/// Extracts a string literal from the macro expanded version of `expr`, /// emitting `err_msg` if `expr` is not a string literal. This does not stop -/// compilation on error, merely emits a non-fatal error and returns None. +/// compilation on error, merely emits a non-fatal error and returns `None`. pub fn expr_to_spanned_string<'a>( cx: &'a mut ExtCtxt<'_>, mut expr: P<ast::Expr>, @@ -1022,7 +1022,7 @@ pub fn check_zero_tts(cx: &ExtCtxt<'_>, } /// Interpreting `tts` as a comma-separated sequence of expressions, -/// expect exactly one string literal, or emit an error and return None. +/// expect exactly one string literal, or emit an error and return `None`. 
pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree], @@ -1044,8 +1044,8 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>, }) } -/// Extract comma-separated expressions from `tts`. If there is a -/// parsing error, emit a non-fatal error and return None. +/// Extracts comma-separated expressions from `tts`. If there is a +/// parsing error, emit a non-fatal error and return `None`. pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> { diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 6708e3c12a0..48f6e4c0c82 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -347,7 +347,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { /// Constructs a qualified path. /// - /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A=Bar>`. + /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A = Bar>`. fn qpath_all(&self, self_type: P<ast::Ty>, trait_path: ast::Path, diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 89d59478a5d..3b97242daa1 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -444,7 +444,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } } - /// Collect all macro invocations reachable at this time in this AST fragment, and replace + /// Collects all macro invocations reachable at this time in this AST fragment, and replace /// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s. /// Then call into resolver that builds a skeleton ("reduced graph") of the fragment and /// prepares data for resolving paths of macro invocations. diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index a9000b89fb4..d4ea3b81a60 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -1,4 +1,4 @@ -//! 
This is an NFA-based parser, which calls out to the main rust parser for named nonterminals +//! This is an NFA-based parser, which calls out to the main rust parser for named non-terminals //! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads //! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in //! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier @@ -22,7 +22,7 @@ //! //! As it processes them, it fills up `eof_items` with threads that would be valid if //! the macro invocation is now over, `bb_items` with threads that are waiting on -//! a Rust nonterminal like `$e:expr`, and `next_items` with threads that are waiting +//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting //! on a particular token. Most of the logic concerns moving the · through the //! repetitions indicated by Kleene stars. The rules for moving the · without //! consuming any input are called epsilon transitions. It only advances or calls @@ -216,7 +216,7 @@ struct MatcherPos<'root, 'tt: 'root> { } impl<'root, 'tt> MatcherPos<'root, 'tt> { - /// Add `m` as a named match for the `idx`-th metavar. + /// Adds `m` as a named match for the `idx`-th metavar. fn push_match(&mut self, idx: usize, m: NamedMatch) { let matches = Rc::make_mut(&mut self.matches[idx]); matches.push(m); @@ -304,7 +304,7 @@ fn create_matches(len: usize) -> Box<[Rc<NamedMatchVec>]> { }.into_boxed_slice() } -/// Generate the top-level matcher position in which the "dot" is before the first token of the +/// Generates the top-level matcher position in which the "dot" is before the first token of the /// matcher `ms` and we are going to start matching at the span `open` in the source. 
fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherPos<'root, 'tt> { let match_idx_hi = count_names(ms); @@ -337,7 +337,7 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP /// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`: /// so it is associated with a single ident in a parse, and all -/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type +/// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type /// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a /// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it. /// @@ -414,7 +414,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>( Success(ret_val) } -/// Generate an appropriate parsing failure message. For EOF, this is "unexpected end...". For +/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For /// other tokens, this is "unexpected token...". pub fn parse_failure_msg(tok: Token) -> String { match tok { @@ -426,7 +426,7 @@ pub fn parse_failure_msg(tok: Token) -> String { } } -/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison) +/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison) fn token_name_eq(t1: &Token, t2: &Token) -> bool { if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) { id1.name == id2.name && is_raw1 == is_raw2 @@ -880,7 +880,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool { } } -/// A call to the "black-box" parser to parse some rust nonterminal. +/// A call to the "black-box" parser to parse some Rust non-terminal. /// /// # Parameters /// @@ -891,7 +891,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool { /// /// # Returns /// -/// The parsed nonterminal. +/// The parsed non-terminal. 
fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { if name == "tt" { return token::NtTT(p.parse_token_tree()); } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 33ea675f9d1..897113ba885 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -432,7 +432,7 @@ fn check_lhs_nt_follows(sess: &ParseSess, // after parsing/expansion. we can report every error in every macro this way. } -/// Check that the lhs contains no repetition which could match an empty token +/// Checks that the lhs contains no repetition which could match an empty token /// tree, because then the matcher would hang indefinitely. fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { use quoted::TokenTree; @@ -960,8 +960,8 @@ fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool { } } -/// True if a fragment of type `frag` can be followed by any sort of -/// token. We use this (among other things) as a useful approximation +/// Returns `true` if a fragment of type `frag` can be followed by any sort of +/// token. We use this (among other things) as a useful approximation /// for when `frag` can be followed by a repetition like `$(...)*` or /// `$(...)+`. In general, these can be a bit tricky to reason about, /// so we adopt a conservative position that says that any fragment @@ -990,7 +990,7 @@ enum IsInFollow { Invalid(String, &'static str), } -/// True if `frag` can legally be followed by the token `tok`. For +/// Returns `true` if `frag` can legally be followed by the token `tok`. For /// fragments that can consume an unbounded number of tokens, `tok` /// must be within a well-defined follow set. 
This is intended to /// guarantee future compatibility: for example, without this rule, if diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 6c3cf3e6312..255795f28c7 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -22,17 +22,17 @@ pub struct Delimited { } impl Delimited { - /// Return the opening delimiter (possibly `NoDelim`). + /// Returns the opening delimiter (possibly `NoDelim`). pub fn open_token(&self) -> token::Token { token::OpenDelim(self.delim) } - /// Return the closing delimiter (possibly `NoDelim`). + /// Returns the closing delimiter (possibly `NoDelim`). pub fn close_token(&self) -> token::Token { token::CloseDelim(self.delim) } - /// Return a `self::TokenTree` with a `Span` corresponding to the opening delimiter. + /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter. pub fn open_tt(&self, span: Span) -> TokenTree { let open_span = if span.is_dummy() { span @@ -42,7 +42,7 @@ impl Delimited { TokenTree::Token(open_span, self.open_token()) } - /// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter. + /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter. pub fn close_tt(&self, span: Span) -> TokenTree { let close_span = if span.is_dummy() { span @@ -107,7 +107,7 @@ impl TokenTree { } } - /// Returns true if the given token tree contains no other tokens. This is vacuously true for + /// Returns `true` if the given token tree contains no other tokens. This is vacuously true for /// single tokens or metavar/decls, but may be false for delimited trees or sequences. pub fn is_empty(&self) -> bool { match *self { @@ -120,7 +120,7 @@ impl TokenTree { } } - /// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences. + /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences. 
pub fn get_tt(&self, index: usize) -> TokenTree { match (self, index) { (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => { @@ -140,7 +140,7 @@ impl TokenTree { } } - /// Retrieve the `TokenTree`'s span. + /// Retrieves the `TokenTree`'s span. pub fn span(&self) -> Span { match *self { TokenTree::Token(sp, _) @@ -411,8 +411,8 @@ where /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an /// error with the appropriate span is emitted to `sess` and a dummy value is returned. /// -/// NOTE: In 2015 edition, * and + are the only Kleene operators and `?` is a separator. In 2018, -/// `?` is a Kleene op and not a separator. +/// N.B., in the 2015 edition, `*` and `+` are the only Kleene operators, and `?` is a separator. +/// In the 2018 edition however, `?` is a Kleene operator, and not a separator. fn parse_sep_and_kleene_op<I>( input: &mut Peekable<I>, span: Span, |
