diff options
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/comments.rs |  4 ++--
-rw-r--r--  src/libsyntax/parse/lexer.rs    |  2 +-
-rw-r--r--  src/libsyntax/parse/parser.rs   | 12 ++++++------
-rw-r--r--  src/libsyntax/parse/token.rs    |  1 -
4 files changed, 9 insertions, 10 deletions
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index 6daeb1b3e1e..c5454a2ca95 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -73,8 +73,8 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str { let mut i = uint::max_value; let mut can_trim = true; let mut first = true; - for lines.iter().advance |line| { - for line.iter().enumerate().advance |(j, c)| { + foreach line in lines.iter() { + foreach (j, c) in line.iter().enumerate() { if j > i || !"* \t".contains_char(c) { can_trim = false; break; diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 1614a303d95..3259d49fcd1 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -837,7 +837,7 @@ mod test { // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) fn check_tokenization (env: Env, expected: ~[token::Token]) { - for expected.iter().advance |expected_tok| { + foreach expected_tok in expected.iter() { let TokenAndSpan {tok:actual_tok, sp: _} = env.string_reader.next_token(); assert_eq!(&actual_tok,expected_tok); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 9444c463686..1d61c5be83d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -3150,7 +3150,7 @@ impl Parser { } = self.parse_items_and_view_items(first_item_attrs, false, false); - for items.iter().advance |item| { + foreach item in items.iter() { let decl = @spanned(item.span.lo, item.span.hi, decl_item(*item)); stmts.push(@spanned(item.span.lo, item.span.hi, stmt_decl(decl, self.get_id()))); @@ -3755,7 +3755,7 @@ impl Parser { fields = ~[]; while *self.token != token::RBRACE { let r = self.parse_struct_decl_field(); - for r.iter().advance |struct_field| { + foreach struct_field in r.iter() { fields.push(*struct_field) } } @@ -4038,7 +4038,7 @@ impl Parser { Some(i) => { let stack = 
&self.sess.included_mod_stack; let mut err = ~"circular modules: "; - for stack.slice(i, stack.len()).iter().advance |p| { + foreach p in stack.slice(i, stack.len()).iter() { err.push_str(p.to_str()); err.push_str(" -> "); } @@ -4246,7 +4246,7 @@ impl Parser { let mut fields: ~[@struct_field] = ~[]; while *self.token != token::RBRACE { let r = self.parse_struct_decl_field(); - for r.iter().advance |struct_field| { + foreach struct_field in r.iter() { fields.push(*struct_field); } } @@ -4286,7 +4286,7 @@ impl Parser { seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_ty(false) ); - for arg_tys.consume_iter().advance |ty| { + foreach ty in arg_tys.consume_iter() { args.push(ast::variant_arg { ty: ty, id: self.get_id(), @@ -4395,7 +4395,7 @@ impl Parser { self.bump(); let the_string = ident_to_str(&s); let mut abis = AbiSet::empty(); - for the_string.word_iter().advance |word| { + foreach word in the_string.word_iter() { match abi::lookup(word) { Some(abi) => { if abis.contains(abi) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 3365222036c..706357320a5 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -687,7 +687,6 @@ pub fn is_reserved_keyword(tok: &Token) -> bool { #[cfg(test)] mod test { use super::*; - use std::io; #[test] fn t1() { let a = fresh_name("ghi"); printfln!("interned name: %u,\ntextual name: %s\n", |
