diff options
Diffstat (limited to 'src/libsyntax/parse')
 src/libsyntax/parse/lexer/comments.rs | 4 ++--
 src/libsyntax/parse/lexer/mod.rs      | 2 +-
 src/libsyntax/parse/mod.rs            | 2 +-
 src/libsyntax/parse/parser.rs         | 4 ++--
 4 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 926385ccd11..7269afcaea4 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -90,7 +90,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { let mut i = usize::MAX; let mut can_trim = true; let mut first = true; - for line in lines.iter() { + for line in &lines { for (j, c) in line.chars().enumerate() { if j > i || !"* \t".contains_char(c) { can_trim = false; @@ -125,7 +125,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { // one-line comments lose their prefix static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"]; - for prefix in ONLINERS.iter() { + for prefix in ONLINERS { if comment.starts_with(*prefix) { return (&comment[prefix.len()..]).to_string(); } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 2cf6058a433..ca6193508fe 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1526,7 +1526,7 @@ mod test { // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) fn check_tokenization (mut string_reader: StringReader, expected: Vec<token::Token> ) { - for expected_tok in expected.iter() { + for expected_tok in &expected { assert_eq!(&string_reader.next_token().tok, expected_tok); } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 8ac5b6e5274..81803d6bd89 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -1163,7 +1163,7 @@ mod test { "impl z { fn a (self: Foo, &myarg: i32) {} }", ]; - for &src in srcs.iter() { + for &src in &srcs { let spans = get_spans_of_pat_idents(src); let Span{ lo, hi, .. 
} = spans[0]; assert!("self" == &src[lo.to_usize()..hi.to_usize()], diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index d99095eeba3..b7960d9e709 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2706,7 +2706,7 @@ impl<'a> Parser<'a> { match self.token { token::Eof => { let open_braces = self.open_braces.clone(); - for sp in open_braces.iter() { + for sp in &open_braces { self.span_help(*sp, "did you mean to close this delimiter?"); } // There shouldn't really be a span, but it's easier for the test runner @@ -5200,7 +5200,7 @@ impl<'a> Parser<'a> { Some(i) => { let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); - for p in included_mod_stack[i.. len].iter() { + for p in &included_mod_stack[i.. len] { err.push_str(&p.display().as_cow()[]); err.push_str(" -> "); } |
