author     Corey Richardson <corey@octayn.net>    2014-06-09 13:12:30 -0700
committer  Corey Richardson <corey@octayn.net>    2014-07-09 00:06:27 -0700
commit     4989a56448c7e3047e0538ff4ef54c49db8a5a4f (patch)
tree       99a15ab91675cd360008b542c3cde8a1f74d6f86 /src/libsyntax/util/parser_testing.rs
parent     5716abe3f019ab7d9c8cdde9879332040191cf88 (diff)
download   rust-4989a56448c7e3047e0538ff4ef54c49db8a5a4f.tar.gz
           rust-4989a56448c7e3047e0538ff4ef54c49db8a5a4f.zip
syntax: doc comments all the things
Diffstat (limited to 'src/libsyntax/util/parser_testing.rs')
-rw-r--r--  src/libsyntax/util/parser_testing.rs  38
1 file changed, 19 insertions(+), 19 deletions(-)
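
The patch below is mechanical: it turns plain // line comments into /// doc comments, which rustdoc attaches to the item that follows and renders as documentation. A minimal illustration in Rust (not code from this commit; the item names are made up):

    // A plain line comment: the compiler and rustdoc both discard it.
    fn undocumented_helper() {}

    /// A doc comment: rustdoc attaches this text to documented_helper
    /// and shows it in the generated API docs (it desugars to a
    /// #[doc = "..."] attribute on the item).
    fn documented_helper() {}

    fn main() {
        undocumented_helper();
        documented_helper();
    }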
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index 04116dec60e..f50739a7069 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -17,14 +17,14 @@ use parse::token;
 
 use std::gc::Gc;
 
-// map a string to tts, using a made-up filename:
+/// Map a string to tts, using a made-up filename:
 pub fn string_to_tts(source_str: String) -> Vec<ast::TokenTree> {
     let ps = new_parse_sess();
     filemap_to_tts(&ps,
                    string_to_filemap(&ps, source_str, "bogofile".to_string()))
 }
 
-// map string to parser (via tts)
+/// Map string to parser (via tts)
 pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a> {
     new_parser_from_source_str(ps,
                                Vec::new(),
@@ -40,51 +40,51 @@ fn with_error_checking_parse<T>(s: String, f: |&mut Parser| -> T) -> T {
     x
 }
 
-// parse a string, return a crate.
+/// Parse a string, return a crate.
 pub fn string_to_crate (source_str : String) -> ast::Crate {
     with_error_checking_parse(source_str, |p| {
         p.parse_crate_mod()
     })
 }
 
-// parse a string, return an expr
+/// Parse a string, return an expr
 pub fn string_to_expr (source_str : String) -> Gc<ast::Expr> {
     with_error_checking_parse(source_str, |p| {
         p.parse_expr()
     })
 }
 
-// parse a string, return an item
+/// Parse a string, return an item
 pub fn string_to_item (source_str : String) -> Option<Gc<ast::Item>> {
     with_error_checking_parse(source_str, |p| {
         p.parse_item(Vec::new())
     })
 }
 
-// parse a string, return a stmt
+/// Parse a string, return a stmt
 pub fn string_to_stmt(source_str : String) -> Gc<ast::Stmt> {
     with_error_checking_parse(source_str, |p| {
         p.parse_stmt(Vec::new())
     })
 }
 
-// parse a string, return a pat. Uses "irrefutable"... which doesn't
-// (currently) affect parsing.
+/// Parse a string, return a pat. Uses "irrefutable"... which doesn't
+/// (currently) affect parsing.
 pub fn string_to_pat(source_str: String) -> Gc<ast::Pat> {
     string_to_parser(&new_parse_sess(), source_str).parse_pat()
 }
 
-// convert a vector of strings to a vector of ast::Ident's
+/// Convert a vector of strings to a vector of ast::Ident's
 pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<ast::Ident> {
     ids.iter().map(|u| token::str_to_ident(*u)).collect()
 }
 
-// does the given string match the pattern? whitespace in the first string
-// may be deleted or replaced with other whitespace to match the pattern.
-// this function is unicode-ignorant; fortunately, the careful design of
-// UTF-8 mitigates this ignorance.  In particular, this function only collapses
-// sequences of \n, \r, ' ', and \t, but it should otherwise tolerate unicode
-// chars. Unsurprisingly, it doesn't do NKF-normalization(?).
+/// Does the given string match the pattern? Whitespace in the first string
+/// may be deleted or replaced with other whitespace to match the pattern.
+/// This function is unicode-ignorant; fortunately, the careful design of
+/// UTF-8 mitigates this ignorance. In particular, this function only collapses
+/// sequences of \n, \r, ' ', and \t, but it should otherwise tolerate unicode
+/// chars. Unsurprisingly, it doesn't do NFKC-normalization(?).
 pub fn matches_codepattern(a : &str, b : &str) -> bool {
     let mut idx_a = 0;
     let mut idx_b = 0;
@@ -122,9 +122,9 @@ pub fn matches_codepattern(a : &str, b : &str) -> bool {
     }
 }
 
-// given a string and an index, return the first uint >= idx
-// that is a non-ws-char or is outside of the legal range of
-// the string.
+/// Given a string and an index, return the first uint >= idx
+/// that is a non-ws-char or is outside of the legal range of
+/// the string.
 fn scan_for_non_ws_or_end(a : &str, idx: uint) -> uint {
     let mut i = idx;
     let len = a.len();
@@ -134,7 +134,7 @@ fn scan_for_non_ws_or_end(a : &str, idx: uint) -> uint {
     i
 }
 
-// copied from lexer.
+/// Copied from lexer.
 pub fn is_whitespace(c: char) -> bool {
     return c == ' ' || c == '\t' || c == '\r' || c == '\n';
 }
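
For readers who want to see the whitespace-collapsing comparison these doc comments describe in one runnable piece, here is a self-contained sketch in current Rust. It mirrors the helper names above (matches_codepattern, scan_for_non_ws_or_end, is_whitespace) but is an illustrative reimplementation of the documented behaviour, not the libsyntax code itself; byte offsets are added freely because the collapsed characters are all ASCII.

    /// The four whitespace characters the comparison collapses.
    fn is_whitespace(c: char) -> bool {
        c == ' ' || c == '\t' || c == '\r' || c == '\n'
    }

    /// First byte index >= idx holding a non-whitespace char, or s.len()
    /// if only whitespace remains.
    fn scan_for_non_ws_or_end(s: &str, idx: usize) -> usize {
        s[idx..]
            .find(|c: char| !is_whitespace(c))
            .map_or(s.len(), |off| idx + off)
    }

    /// Does `given` match `pattern`? Runs of whitespace in `given` may be
    /// dropped or replaced by other whitespace; whitespace required by the
    /// pattern must appear in `given` in some form.
    fn matches_codepattern(given: &str, pattern: &str) -> bool {
        let mut ia = 0;
        let mut ib = 0;
        loop {
            match (given[ia..].chars().next(), pattern[ib..].chars().next()) {
                // Both strings exhausted: everything matched.
                (None, None) => return true,
                // Pattern exhausted: only trailing whitespace may remain.
                (Some(_), None) => {
                    return scan_for_non_ws_or_end(given, ia) == given.len()
                }
                // Given string exhausted while the pattern still expects text.
                (None, Some(_)) => return false,
                (Some(ca), Some(cb)) => {
                    if is_whitespace(ca) && is_whitespace(cb) {
                        // Collapse a whitespace run on both sides.
                        ia = scan_for_non_ws_or_end(given, ia);
                        ib = scan_for_non_ws_or_end(pattern, ib);
                    } else if is_whitespace(ca) {
                        // Extra whitespace in the given string only: drop it.
                        ia = scan_for_non_ws_or_end(given, ia);
                    } else if ca == cb {
                        ia += ca.len_utf8();
                        ib += cb.len_utf8();
                    } else {
                        return false;
                    }
                }
            }
        }
    }

    fn main() {
        assert!(matches_codepattern("fn  main ( ) { }", "fn main () { }"));
        assert!(!matches_codepattern("fnmain() {}", "fn main() {}"));
        println!("ok");
    }

Note the asymmetry the first doc comment hints at: whitespace in the given string is flexible, but whitespace demanded by the pattern is never silently skipped.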