| author | Patrick Walton <pcwalton@mimiga.net> | 2013-11-20 16:23:04 -0800 |
|---|---|---|
| committer | Patrick Walton <pcwalton@mimiga.net> | 2013-11-26 08:24:18 -0800 |
| commit | efc512362b0f2ae200ef079e3566c6b158a857cc | |
| tree | f13bd8c52a12ebff5bc304312aa9708bf34780dc | /src/libsyntax/parse |
| parent | a61a3678ebe5571842d4223e2a0313714893bbf7 | |
| download | rust-efc512362b0f2ae200ef079e3566c6b158a857cc.tar.gz, rust-efc512362b0f2ae200ef079e3566c6b158a857cc.zip | |
libsyntax: Remove all non-`proc` `do` syntax.
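
The `do` keyword was early-Rust sugar for calling a function whose last argument is a closure; this commit rewrites the remaining non-`proc` uses into ordinary closure arguments, as the hunks below show for `with_str_from`, `look_ahead`, and `parse_unspanned_seq`. Below is a minimal sketch of the before/after shape, using a hypothetical `with_str_from`-style helper written in modern Rust (the 2013 `@mut StringReader` signatures in the diff no longer compile, so the signature here is illustrative only):

```rust
// Hypothetical stand-in for the `with_str_from` helper touched in this diff:
// it hands a borrowed slice of the buffer to a callback and returns the result.
fn with_str_from<T>(buf: &str, start: usize, f: impl FnOnce(&str) -> T) -> T {
    f(&buf[start..])
}

fn main() {
    let src = "let ident = 42;";

    // Before this commit, callers used `do` sugar (no longer valid Rust):
    //     do with_str_from(src, 4) |s| {
    //         s.to_owned()
    //     }
    //
    // After this commit, the closure is passed as a normal trailing argument:
    let rest = with_str_from(src, 4, |s| s.to_owned());
    println!("{}", rest); // prints "ident = 42;"
}
```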
Diffstat (limited to 'src/libsyntax/parse')
| mode | file | changes |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/comments.rs | 8 |
| -rw-r--r-- | src/libsyntax/parse/lexer.rs | 16 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 46 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 4 |

4 files changed, 34 insertions, 40 deletions
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index 8defd8a7b6c..d8f2d8a5380 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -106,9 +106,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
         }
         if can_trim {
-            do lines.map |line| {
-                line.slice(i + 1, line.len()).to_owned()
-            }
+            lines.map(|line| line.slice(i + 1, line.len()).to_owned())
         } else {
             lines
         }
@@ -377,10 +375,10 @@ pub fn gather_comments_and_literals(span_diagnostic:
         //discard, and look ahead; we're working with internal state
         let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
         if token::is_lit(&tok) {
-            do with_str_from(rdr, bstart) |s| {
+            with_str_from(rdr, bstart, |s| {
                 debug!("tok lit: {}", s);
                 literals.push(lit {lit: s.to_owned(), pos: sp.lo});
-            }
+            })
         } else {
             debug!("tok: {}", token::to_str(get_ident_interner(), &tok));
         }
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 6aa3962a0e7..e4b93c3b4d5 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -337,7 +337,7 @@ fn consume_any_line_comment(rdr: @mut StringReader)
             while rdr.curr != '\n' && !is_eof(rdr) {
                 bump(rdr);
             }
-            let ret = do with_str_from(rdr, start_bpos) |string| {
+            let ret = with_str_from(rdr, start_bpos, |string| {
                 // but comments with only more "/"s are not
                 if !is_line_non_doc_comment(string) {
                     Some(TokenAndSpan{
@@ -347,7 +347,7 @@ fn consume_any_line_comment(rdr: @mut StringReader)
                 } else {
                     None
                 }
-            };
+            });

             if ret.is_some() {
                 return ret;
@@ -412,7 +412,7 @@ fn consume_block_comment(rdr: @mut StringReader)
     }
     let res = if is_doc_comment {
-        do with_str_from(rdr, start_bpos) |string| {
+        with_str_from(rdr, start_bpos, |string| {
             // but comments with only "*"s between two "/"s are not
             if !is_block_non_doc_comment(string) {
                 Some(TokenAndSpan{
@@ -422,7 +422,7 @@ fn consume_block_comment(rdr: @mut StringReader)
             } else {
                 None
             }
-        }
+        })
     } else {
         None
     };
@@ -652,7 +652,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
             bump(rdr);
         }
-        return do with_str_from(rdr, start) |string| {
+        return with_str_from(rdr, start, |string| {
            if string == "_" {
                token::UNDERSCORE
            } else {
@@ -661,7 +661,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
                // FIXME: perform NFKC normalization here. (Issue #2253)
                token::IDENT(str_to_ident(string), is_mod_name)
            }
-        }
+        })
    }
    if is_dec_digit(c) {
        return scan_number(c, rdr);
    }
@@ -775,9 +775,9 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
        while ident_continue(rdr.curr) {
            bump(rdr);
        }
-        return do with_str_from(rdr, start) |lifetime_name| {
+        return with_str_from(rdr, start, |lifetime_name| {
            token::LIFETIME(str_to_ident(lifetime_name))
-        }
+        })
    }

    // Otherwise it is a character constant:
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 5db26dd99dd..b9a7ec33ee4 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -1031,11 +1031,11 @@ impl Parser {
     // parse the methods in a trait declaration
     pub fn parse_trait_methods(&self) -> ~[trait_method] {
-        do self.parse_unspanned_seq(
+        self.parse_unspanned_seq(
             &token::LBRACE,
             &token::RBRACE,
-            seq_sep_none()
-        ) |p| {
+            seq_sep_none(),
+            |p| {
             let attrs = p.parse_outer_attributes();
             let lo = p.span.lo;
@@ -1048,11 +1048,11 @@ impl Parser {

             let generics = p.parse_generics();

-            let (explicit_self, d) = do self.parse_fn_decl_with_self() |p| {
+            let (explicit_self, d) = self.parse_fn_decl_with_self(|p| {
                 // This is somewhat dubious; We don't want to allow argument
                 // names to be left off if there is a definition...
                 p.parse_arg_general(false)
-            };
+            });

             let hi = p.last_span.hi;
             debug!("parse_trait_methods(): trait method signature ends in \
@@ -1108,7 +1108,7 @@ impl Parser {
                     );
                 }
             }
-        }
+        })
     }

     // parse a possibly mutable type
@@ -3000,13 +3000,13 @@ impl Parser {
             let mutbl = self.parse_mutability();
             pat = self.parse_pat_ident(BindByRef(mutbl));
         } else {
-            let can_be_enum_or_struct = do self.look_ahead(1) |t| {
+            let can_be_enum_or_struct = self.look_ahead(1, |t| {
                 match *t {
                     token::LPAREN | token::LBRACKET | token::LT |
                     token::LBRACE | token::MOD_SEP => true,
                     _ => false,
                 }
-            };
+            });

             if self.look_ahead(1, |t| *t == token::DOTDOT) {
                 let start = self.parse_expr_res(RESTRICT_NO_BAR_OP);
@@ -3040,18 +3040,18 @@ impl Parser {
             let mut args: ~[@Pat] = ~[];
             match *self.token {
               token::LPAREN => {
-                let is_star = do self.look_ahead(1) |t| {
+                let is_star = self.look_ahead(1, |t| {
                     match *t {
                         token::BINOP(token::STAR) => true,
                         _ => false,
                     }
-                };
-                let is_dotdot = do self.look_ahead(1) |t| {
+                });
+                let is_dotdot = self.look_ahead(1, |t| {
                     match *t {
                         token::DOTDOT => true,
                         _ => false,
                     }
-                };
+                });
                 if is_star | is_dotdot {
                     // This is a "top constructor only" pat
                     self.bump();
@@ -3884,9 +3884,9 @@ impl Parser {
         let pur = self.parse_fn_purity();
         let ident = self.parse_ident();
         let generics = self.parse_generics();
-        let (explicit_self, decl) = do self.parse_fn_decl_with_self() |p| {
+        let (explicit_self, decl) = self.parse_fn_decl_with_self(|p| {
             p.parse_arg()
-        };
+        });

         let (inner_attrs, body) = self.parse_inner_attrs_and_block();
         let hi = body.span.hi;
@@ -4027,11 +4027,11 @@ impl Parser {
         } else if *self.token == token::LPAREN {
             // It's a tuple-like struct.
             is_tuple_like = true;
-            fields = do self.parse_unspanned_seq(
+            fields = self.parse_unspanned_seq(
                 &token::LPAREN,
                 &token::RPAREN,
-                seq_sep_trailing_allowed(token::COMMA)
-            ) |p| {
+                seq_sep_trailing_allowed(token::COMMA),
+                |p| {
                 let attrs = self.parse_outer_attributes();
                 let lo = p.span.lo;
                 let struct_field_ = ast::struct_field_ {
@@ -4041,7 +4041,7 @@ impl Parser {
                     attrs: attrs,
                 };
                 @spanned(lo, p.span.hi, struct_field_)
-            };
+            });
             self.expect(&token::SEMI);
         } else if self.eat(&token::SEMI) {
             // It's a unit-like struct.
@@ -4259,20 +4259,16 @@ impl Parser {
                      path: Path,
                      outer_attrs: ~[ast::Attribute],
                      id_sp: Span) -> (ast::item_, ~[ast::Attribute]) {
-        let maybe_i = do self.sess.included_mod_stack.iter().position |p| { *p == path };
+        let maybe_i = self.sess.included_mod_stack.iter().position(|p| *p == path);
         match maybe_i {
             Some(i) => {
                 let stack = &self.sess.included_mod_stack;
                 let mut err = ~"circular modules: ";
                 for p in stack.slice(i, stack.len()).iter() {
-                    do p.display().with_str |s| {
-                        err.push_str(s);
-                    }
+                    p.display().with_str(|s| err.push_str(s));
                     err.push_str(" -> ");
                 }
-                do path.display().with_str |s| {
-                    err.push_str(s);
-                }
+                path.display().with_str(|s| err.push_str(s));
                 self.span_fatal(id_sp, err);
             }
             None => ()
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 63f4f97889c..870c1bd74b1 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -169,9 +169,9 @@ pub fn to_str(input: @ident_interner, t: &Token) -> ~str {
      /* Literals */
      LIT_CHAR(c) => {
          let mut res = ~"'";
-          do char::from_u32(c).unwrap().escape_default |c| {
+          char::from_u32(c).unwrap().escape_default(|c| {
              res.push_char(c);
-          }
+          });
          res.push_char('\'');
          res
      }
