about summary refs log tree commit diff
path: root/src/libsyntax/parse/token.rs
diff options
context:
space:
mode:
author    Alexander Regueiro <alexreg@me.com>    2019-02-08 14:53:55 +0100
committer Alexander Regueiro <alexreg@me.com>    2019-02-10 23:42:32 +0000
commit    c3e182cf43aea2c010a1915eb37293a458df2228 (patch)
tree      225aa2dfceff56d10c0b31f6966fbf7ec5da8180 /src/libsyntax/parse/token.rs
parent    0b7af2668a80fb2fa720a06ca44aff4dd1e9de38 (diff)
download  rust-c3e182cf43aea2c010a1915eb37293a458df2228.tar.gz
download  rust-c3e182cf43aea2c010a1915eb37293a458df2228.zip
rustc: doc comments
Diffstat (limited to 'src/libsyntax/parse/token.rs')
-rw-r--r--  src/libsyntax/parse/token.rs  20
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 09924e304cf..ff7f3e0bfae 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -38,16 +38,16 @@ pub enum BinOpToken {
     Shr,
 }
 
-/// A delimiter token
+/// A delimiter token.
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum DelimToken {
-    /// A round parenthesis: `(` or `)`
+    /// A round parenthesis (i.e., `(` or `)`).
     Paren,
-    /// A square bracket: `[` or `]`
+    /// A square bracket (i.e., `[` or `]`).
     Bracket,
-    /// A curly brace: `{` or `}`
+    /// A curly brace (i.e., `{` or `}`).
     Brace,
-    /// An empty delimiter
+    /// An empty delimiter.
     NoDelim,
 }
 
@@ -172,9 +172,9 @@ pub enum Token {
     Question,
     /// Used by proc macros for representing lifetimes, not generated by lexer right now.
     SingleQuote,
-    /// An opening delimiter, eg. `{`
+    /// An opening delimiter (e.g., `{`).
     OpenDelim(DelimToken),
-    /// A closing delimiter, eg. `}`
+    /// A closing delimiter (e.g., `}`).
     CloseDelim(DelimToken),
 
     /* Literals */
@@ -188,16 +188,16 @@ pub enum Token {
     // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
     Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
     // Can be expanded into several tokens.
-    /// Doc comment
+    /// A doc comment.
     DocComment(ast::Name),
 
     // Junk. These carry no data because we don't really care about the data
     // they *would* carry, and don't really want to allocate a new ident for
     // them. Instead, users could extract that from the associated span.
 
-    /// Whitespace
+    /// Whitespace.
     Whitespace,
-    /// Comment
+    /// A comment.
     Comment,
     Shebang(ast::Name),