| field | value | date |
|---|---|---|
| author | Alex Crichton <alex@alexcrichton.com> | 2015-02-02 11:01:12 -0800 |
| committer | Alex Crichton <alex@alexcrichton.com> | 2015-02-02 11:01:12 -0800 |
| commit | 7335c7dd63cafe70ffca76677f9e33bc6eccefaa (patch) | |
| tree | 3646a5159f74b47d8bdf471eff072ea71f395d82 /src/libsyntax/parse | |
| parent | 075588a4939acb47feea79779a9bdacce702d9c5 (diff) | |
| parent | 3484706c38272828efc50b2553578afc62230dbb (diff) | |
| download | rust-7335c7dd63cafe70ffca76677f9e33bc6eccefaa.tar.gz rust-7335c7dd63cafe70ffca76677f9e33bc6eccefaa.zip | |
rollup merge of #21830: japaric/for-cleanup
Conflicts:
    src/librustc/metadata/filesearch.rs
    src/librustc_back/target/mod.rs
    src/libstd/os.rs
    src/libstd/sys/windows/os.rs
    src/libsyntax/ext/tt/macro_parser.rs
    src/libsyntax/print/pprust.rs
    src/test/compile-fail/issue-2149.rs
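The cleanup applied throughout this rollup replaces explicit `.iter()` and `.into_iter()` calls in `for` loops with the equivalent `IntoIterator` forms: `&collection` for borrowed iteration, and the bare collection for by-value iteration. A minimal standalone sketch of the idiom (illustrative only, not code from this patch):

```rust
fn main() {
    let names = vec![String::from("a"), String::from("b")];

    // Old style: explicit iterator adapter; yields &String.
    for n in names.iter() {
        println!("{}", n);
    }

    // New style: `&names` goes through IntoIterator and also yields &String.
    for n in &names {
        println!("{}", n);
    }

    // By-value form: plain `names` replaces `names.into_iter()` and consumes the Vec,
    // so each `n` is an owned String.
    for n in names {
        println!("{}", n);
    }
}
```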
Diffstat (limited to 'src/libsyntax/parse')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/lexer/comments.rs | 4 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 6 |
4 files changed, 7 insertions, 7 deletions
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index a026b8feee1..b17fc7fe82e 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -90,7 +90,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         let mut i = usize::MAX;
         let mut can_trim = true;
         let mut first = true;
-        for line in lines.iter() {
+        for line in &lines {
             for (j, c) in line.chars().enumerate() {
                 if j > i || !"* \t".contains_char(c) {
                     can_trim = false;
@@ -125,7 +125,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
     // one-line comments lose their prefix
     static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
-    for prefix in ONLINERS.iter() {
+    for prefix in ONLINERS {
         if comment.starts_with(*prefix) {
             return (&comment[prefix.len()..]).to_string();
         }
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 65051ff8360..e6da47304ce 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1526,7 +1526,7 @@ mod test {
     // check that the given reader produces the desired stream
     // of tokens (stop checking after exhausting the expected vec)
     fn check_tokenization (mut string_reader: StringReader, expected: Vec<token::Token> ) {
-        for expected_tok in expected.iter() {
+        for expected_tok in &expected {
             assert_eq!(&string_reader.next_token().tok, expected_tok);
         }
     }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index b919f70c3cd..72e4a74bc73 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -1164,7 +1164,7 @@ mod test {
             "impl z { fn a (self: Foo, &myarg: i32) {} }",
             ];
-        for &src in srcs.iter() {
+        for &src in &srcs {
             let spans = get_spans_of_pat_idents(src);
             let Span{ lo, hi, .. } = spans[0];
             assert!("self" == &src[lo.to_usize()..hi.to_usize()],
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 0ed85a014ef..c56734439eb 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -2713,7 +2713,7 @@ impl<'a> Parser<'a> {
         match self.token {
             token::Eof => {
                 let open_braces = self.open_braces.clone();
-                for sp in open_braces.iter() {
+                for sp in &open_braces {
                     self.span_help(*sp, "did you mean to close this delimiter?");
                 }
                 // There shouldn't really be a span, but it's easier for the test runner
@@ -5207,7 +5207,7 @@ impl<'a> Parser<'a> {
             Some(i) => {
                 let mut err = String::from_str("circular modules: ");
                 let len = included_mod_stack.len();
-                for p in included_mod_stack[i.. len].iter() {
+                for p in &included_mod_stack[i.. len] {
                     err.push_str(&p.display().as_cow()[]);
                     err.push_str(" -> ");
                 }
@@ -5452,7 +5452,7 @@ impl<'a> Parser<'a> {
                 seq_sep_trailing_allowed(token::Comma),
                 |p| p.parse_ty_sum()
             );
-            for ty in arg_tys.into_iter() {
+            for ty in arg_tys {
                 args.push(ast::VariantArg {
                     ty: ty,
                     id: ast::DUMMY_NODE_ID,
