diff options
| author | Jorge Aparicio <japaricious@gmail.com> | 2015-01-31 12:20:46 -0500 |
|---|---|---|
| committer | Jorge Aparicio <japaricious@gmail.com> | 2015-02-02 13:40:18 -0500 |
| commit | d5d7e6565a4034b93d19be1edafd20730a4276bc (patch) | |
| tree | f978751c20a214c9fe0cd2d60645a4e1a3b760fd | |
| parent | 9f90d666e0cd9a73ef35b76b6605f9d1f69df849 (diff) | |
| download | rust-d5d7e6565a4034b93d19be1edafd20730a4276bc.tar.gz rust-d5d7e6565a4034b93d19be1edafd20730a4276bc.zip | |
`for x in xs.iter()` -> `for x in &xs`
269 files changed, 1063 insertions, 1064 deletions
diff --git a/src/compiletest/compiletest.rs b/src/compiletest/compiletest.rs index b73623223fd..b16e9dea03d 100644 --- a/src/compiletest/compiletest.rs +++ b/src/compiletest/compiletest.rs @@ -276,7 +276,7 @@ pub fn make_tests(config: &Config) -> Vec<test::TestDescAndFn> { config.src_base.display()); let mut tests = Vec::new(); let dirs = fs::readdir(&config.src_base).unwrap(); - for file in dirs.iter() { + for file in &dirs { let file = file.clone(); debug!("inspecting file {:?}", file.display()); if is_test(config, &file) { @@ -304,13 +304,13 @@ pub fn is_test(config: &Config, testfile: &Path) -> bool { let mut valid = false; - for ext in valid_extensions.iter() { + for ext in &valid_extensions { if name.ends_with(ext.as_slice()) { valid = true; } } - for pre in invalid_prefixes.iter() { + for pre in &invalid_prefixes { if name.starts_with(pre.as_slice()) { valid = false; } diff --git a/src/compiletest/procsrv.rs b/src/compiletest/procsrv.rs index 7f3865308da..57f4171f7c2 100644 --- a/src/compiletest/procsrv.rs +++ b/src/compiletest/procsrv.rs @@ -46,7 +46,7 @@ pub fn run(lib_path: &str, match cmd.spawn() { Ok(mut process) => { - for input in input.iter() { + if let Some(input) = input { process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap(); } let ProcessOutput { status, output, error } = @@ -78,7 +78,7 @@ pub fn run_background(lib_path: &str, match cmd.spawn() { Ok(mut process) => { - for input in input.iter() { + if let Some(input) = input { process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap(); } diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs index 18cb3d1d5b0..2143cf22e05 100644 --- a/src/compiletest/runtest.rs +++ b/src/compiletest/runtest.rs @@ -547,7 +547,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) { exe_file.as_str().unwrap().replace("\\", "\\\\"))[]); // Add line breakpoints - for line in breakpoint_lines.iter() { + for line in &breakpoint_lines { 
script_str.push_str(&format!("break '{}':{}\n", testfile.filename_display(), *line)[]); @@ -683,13 +683,13 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path) script_str.push_str("type category enable Rust\n"); // Set breakpoints on every line that contains the string "#break" - for line in breakpoint_lines.iter() { + for line in &breakpoint_lines { script_str.push_str(format!("breakpoint set --line {}\n", line).as_slice()); } // Append the other commands - for line in commands.iter() { + for line in &commands { script_str.push_str(line.as_slice()); script_str.push_str("\n"); } @@ -847,7 +847,7 @@ fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String]) let mut rest = line.trim(); let mut first = true; let mut failed = false; - for frag in check_fragments[i].iter() { + for frag in &check_fragments[i] { let found = if first { if rest.starts_with(frag.as_slice()) { Some(0) @@ -915,7 +915,7 @@ fn check_error_patterns(props: &TestProps, missing_patterns[0]).as_slice(), proc_res); } else { - for pattern in missing_patterns.iter() { + for pattern in missing_patterns { error(format!("error pattern '{}' not found!", *pattern).as_slice()); } @@ -935,7 +935,7 @@ fn check_no_compiler_crash(proc_res: &ProcRes) { fn check_forbid_output(props: &TestProps, output_to_check: &str, proc_res: &ProcRes) { - for pat in props.forbid_output.iter() { + for pat in &props.forbid_output { if output_to_check.contains(pat.as_slice()) { fatal_proc_rec("forbidden pattern found in compiler output", proc_res); } @@ -1173,7 +1173,7 @@ fn compose_and_run_compiler( // FIXME (#9639): This needs to handle non-utf8 paths let extra_link_args = vec!("-L".to_string(), aux_dir.as_str().unwrap().to_string()); - for rel_ab in props.aux_builds.iter() { + for rel_ab in &props.aux_builds { let abs_ab = config.aux_base.join(rel_ab.as_slice()); let aux_props = header::load_props(&abs_ab); let mut crate_type = if aux_props.no_prefer_dynamic { @@ -1510,7 +1510,7 
@@ fn _arm_exec_compiled_test(config: &Config, runargs.push(format!("{}", config.adb_test_dir)); runargs.push(format!("{}", prog_short)); - for tv in args.args.iter() { + for tv in &args.args { runargs.push(tv.to_string()); } procsrv::run("", @@ -1591,7 +1591,7 @@ fn _arm_push_aux_shared_library(config: &Config, testfile: &Path) { let tdir = aux_output_dir_name(config, testfile); let dirs = fs::readdir(&tdir).unwrap(); - for file in dirs.iter() { + for file in &dirs { if file.extension_str() == Some("so") { // FIXME (#9639): This needs to handle non-utf8 paths let copy_result = procsrv::run("", diff --git a/src/compiletest/util.rs b/src/compiletest/util.rs index a116cc33690..b1e44ef34fe 100644 --- a/src/compiletest/util.rs +++ b/src/compiletest/util.rs @@ -26,7 +26,7 @@ static OS_TABLE: &'static [(&'static str, &'static str)] = &[ ]; pub fn get_os(triple: &str) -> &'static str { - for &(triple_os, os) in OS_TABLE.iter() { + for &(triple_os, os) in OS_TABLE { if triple.contains(triple_os) { return os } diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 5ada51976ac..0ff6cf7b79a 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -127,7 +127,7 @@ impl Drop for Arena { fn drop(&mut self) { unsafe { destroy_chunk(&*self.head.borrow()); - for chunk in self.chunks.borrow().iter() { + for chunk in &*self.chunks.borrow() { if !chunk.is_copy.get() { destroy_chunk(chunk); } diff --git a/src/libcollections/bench.rs b/src/libcollections/bench.rs index d03fbf00847..9301bf5e4ab 100644 --- a/src/libcollections/bench.rs +++ b/src/libcollections/bench.rs @@ -73,7 +73,7 @@ pub fn find_rand_n<M, T, I, F>(n: uint, let mut keys = (0..n).map(|_| rng.gen::<uint>() % n) .collect::<Vec<_>>(); - for k in keys.iter() { + for k in &keys { insert(map, *k); } diff --git a/src/libcollections/binary_heap.rs b/src/libcollections/binary_heap.rs index 8aa4c77f6f9..56bc573cbb4 100644 --- a/src/libcollections/binary_heap.rs +++ b/src/libcollections/binary_heap.rs @@ -696,7 +696,7 @@ 
mod tests { let iterout = [9, 5, 3]; let heap = BinaryHeap::from_vec(data); let mut i = 0; - for el in heap.iter() { + for el in &heap { assert_eq!(*el, iterout[i]); i += 1; } @@ -884,7 +884,7 @@ mod tests { let mut q: BinaryHeap<uint> = xs.iter().rev().map(|&x| x).collect(); - for &x in xs.iter() { + for &x in &xs { assert_eq!(q.pop().unwrap(), x); } } diff --git a/src/libcollections/bit.rs b/src/libcollections/bit.rs index ec2a274a45d..3d16dd4b166 100644 --- a/src/libcollections/bit.rs +++ b/src/libcollections/bit.rs @@ -976,7 +976,7 @@ impl Ord for Bitv { #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Bitv { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - for bit in self.iter() { + for bit in self { try!(write!(fmt, "{}", if bit { 1u32 } else { 0u32 })); } Ok(()) @@ -1743,7 +1743,7 @@ impl fmt::Debug for BitvSet { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(write!(fmt, "BitvSet {{")); let mut first = true; - for n in self.iter() { + for n in self { if !first { try!(write!(fmt, ", ")); } @@ -1756,7 +1756,7 @@ impl fmt::Debug for BitvSet { impl<S: hash::Writer + hash::Hasher> hash::Hash<S> for BitvSet { fn hash(&self, state: &mut S) { - for pos in self.iter() { + for pos in self { pos.hash(state); } } @@ -2600,7 +2600,7 @@ mod bitv_bench { b.iter(|| { let mut sum = 0u; for _ in 0u..10 { - for pres in bitv.iter() { + for pres in &bitv { sum += pres as uint; } } @@ -2613,7 +2613,7 @@ mod bitv_bench { let bitv = Bitv::from_elem(BENCH_BITS, false); b.iter(|| { let mut sum = 0u; - for pres in bitv.iter() { + for pres in &bitv { sum += pres as uint; } sum @@ -2674,8 +2674,8 @@ mod bitv_set_test { fn test_bitv_set_frombitv_init() { let bools = [true, false]; let lengths = [10, 64, 100]; - for &b in bools.iter() { - for &l in lengths.iter() { + for &b in &bools { + for &l in &lengths { let bitset = BitvSet::from_bitv(Bitv::from_elem(l, b)); assert_eq!(bitset.contains(&1u), b); assert_eq!(bitset.contains(&(l-1u)), b); @@ 
-3062,7 +3062,7 @@ mod bitv_set_bench { |idx| {idx % 3 == 0})); b.iter(|| { let mut sum = 0u; - for idx in bitv.iter() { + for idx in &bitv { sum += idx as uint; } sum diff --git a/src/libcollections/btree/map.rs b/src/libcollections/btree/map.rs index ce5e8f07be1..bc657a19d78 100644 --- a/src/libcollections/btree/map.rs +++ b/src/libcollections/btree/map.rs @@ -856,7 +856,7 @@ impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> { #[stable(feature = "rust1", since = "1.0.0")] impl<S: Hasher, K: Hash<S>, V: Hash<S>> Hash<S> for BTreeMap<K, V> { fn hash(&self, state: &mut S) { - for elt in self.iter() { + for elt in self { elt.hash(state); } } @@ -1946,7 +1946,7 @@ mod bench { } b.iter(|| { - for entry in map.iter() { + for entry in &map { black_box(entry); } }); diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs index 8d6f06b25c5..8fdfe9ed56a 100644 --- a/src/libcollections/btree/node.rs +++ b/src/libcollections/btree/node.rs @@ -435,13 +435,13 @@ impl<K: Clone, V: Clone> Clone for Node<K, V> { let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0); let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0); - for key in self.keys().iter() { + for key in self.keys() { keys.push(key.clone()) } - for val in self.vals().iter() { + for val in self.vals() { vals.push(val.clone()) } - for edge in self.edges().iter() { + for edge in self.edges() { edges.push(edge.clone()) } diff --git a/src/libcollections/btree/set.rs b/src/libcollections/btree/set.rs index 72d5bf6d799..a4e28d36a05 100644 --- a/src/libcollections/btree/set.rs +++ b/src/libcollections/btree/set.rs @@ -791,8 +791,8 @@ mod test { let mut set_a = BTreeSet::new(); let mut set_b = BTreeSet::new(); - for x in a.iter() { assert!(set_a.insert(*x)) } - for y in b.iter() { assert!(set_b.insert(*y)) } + for x in a { assert!(set_a.insert(*x)) } + for y in b { assert!(set_b.insert(*y)) } let mut i = 0; f(&set_a, &set_b, Counter { i: &mut i, expected: expected }); @@ -894,7 +894,7 @@ 
mod test { let set: BTreeSet<int> = xs.iter().map(|&x| x).collect(); - for x in xs.iter() { + for x in &xs { assert!(set.contains(x)); } } diff --git a/src/libcollections/dlist.rs b/src/libcollections/dlist.rs index beb2973febc..e229cd8a961 100644 --- a/src/libcollections/dlist.rs +++ b/src/libcollections/dlist.rs @@ -917,7 +917,7 @@ impl<A: fmt::Debug> fmt::Debug for DList<A> { impl<S: Writer + Hasher, A: Hash<S>> Hash<S> for DList<A> { fn hash(&self, state: &mut S) { self.len().hash(state); - for elt in self.iter() { + for elt in self { elt.hash(state); } } diff --git a/src/libcollections/enum_set.rs b/src/libcollections/enum_set.rs index 9765bb5875e..14a3a5a0990 100644 --- a/src/libcollections/enum_set.rs +++ b/src/libcollections/enum_set.rs @@ -36,7 +36,7 @@ impl<E:CLike + fmt::Debug> fmt::Debug for EnumSet<E> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(write!(fmt, "EnumSet {{")); let mut first = true; - for e in self.iter() { + for e in self { if !first { try!(write!(fmt, ", ")); } diff --git a/src/libcollections/ring_buf.rs b/src/libcollections/ring_buf.rs index 7032a3d9137..4b9f3980db2 100644 --- a/src/libcollections/ring_buf.rs +++ b/src/libcollections/ring_buf.rs @@ -1573,7 +1573,7 @@ impl<A: Ord> Ord for RingBuf<A> { impl<S: Writer + Hasher, A: Hash<S>> Hash<S> for RingBuf<A> { fn hash(&self, state: &mut S) { self.len().hash(state); - for elt in self.iter() { + for elt in self { elt.hash(state); } } @@ -1856,7 +1856,7 @@ mod tests { b.iter(|| { let mut sum = 0; - for &i in ring.iter() { + for &i in &ring { sum += i; } test::black_box(sum); diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index affa4898010..9be918dbe15 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -1118,7 +1118,7 @@ impl<T: Clone, V: AsSlice<T>> SliceConcatExt<T, Vec<T>> for [V] { fn concat(&self) -> Vec<T> { let size = self.iter().fold(0u, |acc, v| acc + v.as_slice().len()); let mut result = 
Vec::with_capacity(size); - for v in self.iter() { + for v in self { result.push_all(v.as_slice()) } result @@ -1128,7 +1128,7 @@ impl<T: Clone, V: AsSlice<T>> SliceConcatExt<T, Vec<T>> for [V] { let size = self.iter().fold(0u, |acc, v| acc + v.as_slice().len()); let mut result = Vec::with_capacity(size + self.len()); let mut first = true; - for v in self.iter() { + for v in self { if first { first = false } else { result.push(sep.clone()) } result.push_all(v.as_slice()) } @@ -2681,13 +2681,13 @@ mod tests { assert_eq!(v.len(), 3); let mut cnt = 0u; - for f in v.iter() { + for f in &v { assert!(*f == Foo); cnt += 1; } assert_eq!(cnt, 3); - for f in v[1..3].iter() { + for f in &v[1..3] { assert!(*f == Foo); cnt += 1; } @@ -2707,7 +2707,7 @@ mod tests { let xs: [Foo; 3] = [Foo, Foo, Foo]; cnt = 0; - for f in xs.iter() { + for f in &xs { assert!(*f == Foo); cnt += 1; } @@ -2858,7 +2858,7 @@ mod bench { b.iter(|| { let mut sum = 0; - for x in v.iter() { + for x in &v { sum += *x; } // sum == 11806, to stop dead code elimination. 
diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index 43b5d14cc86..75f7b812974 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -99,7 +99,7 @@ impl<S: Str> SliceConcatExt<str, String> for [S] { let len = s.iter().map(|s| s.as_slice().len()).sum(); let mut result = String::with_capacity(len); - for s in s.iter() { + for s in s { result.push_str(s.as_slice()) } @@ -125,7 +125,7 @@ impl<S: Str> SliceConcatExt<str, String> for [S] { let mut result = String::with_capacity(len); let mut first = true; - for s in s.iter() { + for s in s { if first { first = false; } else { @@ -2005,7 +2005,7 @@ mod tests { let s = "ศไทย中华Việt Nam"; let v = vec!['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']; let mut pos = 0; - for ch in v.iter() { + for ch in &v { assert!(s.char_at(pos) == *ch); pos += ch.to_string().len(); } @@ -2703,7 +2703,7 @@ mod tests { &["\u{378}\u{308}\u{903}"], &["\u{378}\u{308}", "\u{903}"]), ]; - for &(s, g) in test_same.iter() { + for &(s, g) in &test_same[] { // test forward iterator assert!(order::equals(s.graphemes(true), g.iter().map(|&x| x))); assert!(order::equals(s.graphemes(false), g.iter().map(|&x| x))); @@ -2713,7 +2713,7 @@ mod tests { assert!(order::equals(s.graphemes(false).rev(), g.iter().rev().map(|&x| x))); } - for &(s, gt, gf) in test_diff.iter() { + for &(s, gt, gf) in &test_diff { // test forward iterator assert!(order::equals(s.graphemes(true), gt.iter().map(|&x| x))); assert!(order::equals(s.graphemes(false), gf.iter().map(|&x| x))); diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index d0b89cfa2c3..b9857973946 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -1118,7 +1118,7 @@ mod tests { (String::from_str("\u{20000}"), vec![0xD840, 0xDC00])]; - for p in pairs.iter() { + for p in &pairs { let (s, u) = (*p).clone(); let s_as_utf16 = s.utf16_units().collect::<Vec<u16>>(); let u_as_string = String::from_utf16(u.as_slice()).unwrap(); 
diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index 82c1c37ab6a..e570d401609 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -1547,7 +1547,7 @@ impl<T> Drop for Vec<T> { // zeroed (when moving out, because of #[unsafe_no_drop_flag]). if self.cap != 0 { unsafe { - for x in self.iter() { + for x in &*self { ptr::read(x); } dealloc(*self.ptr, self.cap) @@ -2129,7 +2129,7 @@ mod tests { v.push(()); assert_eq!(v.iter().count(), 2); - for &() in v.iter() {} + for &() in &v {} assert_eq!(v.iter_mut().count(), 2); v.push(()); diff --git a/src/libcollections/vec_map.rs b/src/libcollections/vec_map.rs index f2a9bb4392c..1adea45f630 100644 --- a/src/libcollections/vec_map.rs +++ b/src/libcollections/vec_map.rs @@ -90,7 +90,7 @@ impl<S: Writer + Hasher, V: Hash<S>> Hash<S> for VecMap<V> { // In order to not traverse the `VecMap` twice, count the elements // during iteration. let mut count: uint = 0; - for elt in self.iter() { + for elt in self { elt.hash(state); count += 1; } @@ -1112,7 +1112,7 @@ mod test_map { let map: VecMap<char> = xs.iter().map(|&x| x).collect(); - for &(k, v) in xs.iter() { + for &(k, v) in &xs { assert_eq!(map.get(&k), Some(&v)); } } diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 694888bb2bc..20ef30b0a3e 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -888,7 +888,7 @@ impl<T: Debug> Debug for [T] { try!(write!(f, "[")); } let mut is_first = true; - for x in self.iter() { + for x in self { if is_first { is_first = false; } else { diff --git a/src/libcore/hash/mod.rs b/src/libcore/hash/mod.rs index 5a4d2fffade..d73e6ed589f 100644 --- a/src/libcore/hash/mod.rs +++ b/src/libcore/hash/mod.rs @@ -205,7 +205,7 @@ impl<S: Writer + Hasher, T: Hash<S>> Hash<S> for [T] { #[inline] fn hash(&self, state: &mut S) { self.len().hash(state); - for elt in self.iter() { + for elt in self { elt.hash(state); } } diff --git a/src/libcoretest/cmp.rs b/src/libcoretest/cmp.rs index 
6bc1f14cc5a..2e5c6fe5a2f 100644 --- a/src/libcoretest/cmp.rs +++ b/src/libcoretest/cmp.rs @@ -66,11 +66,11 @@ fn test_partial_min() { (1.0f64, NAN, None) ]; - for &(a, b, result) in data_integer.iter() { + for &(a, b, result) in &data_integer { assert!(partial_min(a, b) == result); } - for &(a, b, result) in data_float.iter() { + for &(a, b, result) in &data_float { assert!(partial_min(a, b) == result); } } @@ -99,11 +99,11 @@ fn test_partial_max() { (1.0f64, NAN, None) ]; - for &(a, b, result) in data_integer.iter() { + for &(a, b, result) in &data_integer { assert!(partial_max(a, b) == result); } - for &(a, b, result) in data_float.iter() { + for &(a, b, result) in &data_float { assert!(partial_max(a, b) == result); } } diff --git a/src/libcoretest/hash/mod.rs b/src/libcoretest/hash/mod.rs index 07f3ab4a5a7..2da3f370b40 100644 --- a/src/libcoretest/hash/mod.rs +++ b/src/libcoretest/hash/mod.rs @@ -25,7 +25,7 @@ impl Default for MyHasher { impl Writer for MyHasher { // Most things we'll just add up the bytes. fn write(&mut self, buf: &[u8]) { - for byte in buf.iter() { + for byte in buf { self.hash += *byte as u64; } } diff --git a/src/libcoretest/hash/sip.rs b/src/libcoretest/hash/sip.rs index 431f7e748f6..a493f71925e 100644 --- a/src/libcoretest/hash/sip.rs +++ b/src/libcoretest/hash/sip.rs @@ -109,7 +109,7 @@ fn test_siphash() { fn to_hex_str(r: &[u8; 8]) -> String { let mut s = String::new(); - for b in r.iter() { + for b in r { s.push_str(format!("{}", fmt::radix(*b, 16)).as_slice()); } s @@ -130,7 +130,7 @@ fn test_siphash() { fn result_str(h: u64) -> String { let r = result_bytes(h); let mut s = String::new(); - for b in r.iter() { + for b in &r { s.push_str(format!("{}", fmt::radix(*b, 16)).as_slice()); } s diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs index 055672df5d1..b4eb8e9902a 100644 --- a/src/libgetopts/lib.rs +++ b/src/libgetopts/lib.rs @@ -315,7 +315,7 @@ impl Matches { /// Returns true if any of several options were matched. 
pub fn opts_present(&self, names: &[String]) -> bool { - for nm in names.iter() { + for nm in names { match find_opt(self.opts.as_slice(), Name::from_str(&nm[])) { Some(id) if !self.vals[id].is_empty() => return true, _ => (), @@ -326,7 +326,7 @@ impl Matches { /// Returns the string argument supplied to one of several matching options or `None`. pub fn opts_str(&self, names: &[String]) -> Option<String> { - for nm in names.iter() { + for nm in names { match self.opt_val(&nm[]) { Some(Val(ref s)) => return Some(s.clone()), _ => () @@ -342,7 +342,7 @@ impl Matches { pub fn opt_strs(&self, nm: &str) -> Vec<String> { let mut acc: Vec<String> = Vec::new(); let r = self.opt_vals(nm); - for v in r.iter() { + for v in &r { match *v { Val(ref s) => acc.push((*s).clone()), _ => () @@ -395,7 +395,7 @@ fn find_opt(opts: &[Opt], nm: Name) -> Option<uint> { } // Search in aliases. - for candidate in opts.iter() { + for candidate in opts { if candidate.aliases.iter().position(|opt| opt.name == nm).is_some() { return opts.iter().position(|opt| opt.name == candidate.name); } @@ -648,7 +648,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { } } let mut name_pos = 0; - for nm in names.iter() { + for nm in &names { name_pos += 1; let optid = match find_opt(opts.as_slice(), (*nm).clone()) { Some(id) => id, diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 3606387ad23..e9c7f837014 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -548,7 +548,7 @@ pub fn render_opts<'a, N:Clone+'a, E:Clone+'a, G:Labeller<'a,N,E>+GraphWalk<'a,N options: &[RenderOption]) -> old_io::IoResult<()> { fn writeln<W:Writer>(w: &mut W, arg: &[&str]) -> old_io::IoResult<()> { - for &s in arg.iter() { try!(w.write_str(s)); } + for &s in arg { try!(w.write_str(s)); } w.write_char('\n') } @@ -557,7 +557,7 @@ pub fn render_opts<'a, N:Clone+'a, E:Clone+'a, G:Labeller<'a,N,E>+GraphWalk<'a,N } try!(writeln(w, &["digraph ", g.graph_id().as_slice(), " {"])); - for n in 
g.nodes().iter() { + for n in &*g.nodes() { try!(indent(w)); let id = g.node_id(n); if options.contains(&RenderOption::NoNodeLabels) { @@ -569,7 +569,7 @@ pub fn render_opts<'a, N:Clone+'a, E:Clone+'a, G:Labeller<'a,N,E>+GraphWalk<'a,N } } - for e in g.edges().iter() { + for e in &*g.edges() { let escaped_label = g.edge_label(e).escape(); try!(indent(w)); let source = g.source(e); diff --git a/src/librand/distributions/mod.rs b/src/librand/distributions/mod.rs index 1b5e5ae8398..fb543e3d549 100644 --- a/src/librand/distributions/mod.rs +++ b/src/librand/distributions/mod.rs @@ -305,7 +305,7 @@ mod tests { let mut rng = CountingRng { i: 0 }; - for &val in expected.iter() { + for &val in &expected { assert_eq!(wc.ind_sample(&mut rng), val) } }} diff --git a/src/librand/distributions/range.rs b/src/librand/distributions/range.rs index 16830c84c46..ab0b45e7d32 100644 --- a/src/librand/distributions/range.rs +++ b/src/librand/distributions/range.rs @@ -188,7 +188,7 @@ mod tests { let v: &[($ty, $ty)] = &[(0, 10), (10, 127), (Int::min_value(), Int::max_value())]; - for &(low, high) in v.iter() { + for &(low, high) in v { let mut sampler: Range<$ty> = Range::new(low, high); for _ in 0u..1000 { let v = sampler.sample(&mut rng); @@ -214,7 +214,7 @@ mod tests { (-1e35, -1e25), (1e-35, 1e-25), (-1e35, 1e35)]; - for &(low, high) in v.iter() { + for &(low, high) in v { let mut sampler: Range<$ty> = Range::new(low, high); for _ in 0u..1000 { let v = sampler.sample(&mut rng); diff --git a/src/librand/isaac.rs b/src/librand/isaac.rs index a22ef704fa5..d0f4afdde72 100644 --- a/src/librand/isaac.rs +++ b/src/librand/isaac.rs @@ -134,7 +134,7 @@ impl IsaacRng { } let r = [(0, MIDPOINT), (MIDPOINT, 0)]; - for &(mr_offset, m2_offset) in r.iter() { + for &(mr_offset, m2_offset) in &r { macro_rules! 
rngstepp { ($j:expr, $shift:expr) => {{ @@ -373,7 +373,7 @@ impl Isaac64Rng { } } - for &(mr_offset, m2_offset) in MP_VEC.iter() { + for &(mr_offset, m2_offset) in &MP_VEC { for base in (0..MIDPOINT / 4).map(|i| i * 4) { macro_rules! rngstepp { diff --git a/src/librand/reseeding.rs b/src/librand/reseeding.rs index 57e90139876..75ac1b2cf44 100644 --- a/src/librand/reseeding.rs +++ b/src/librand/reseeding.rs @@ -225,7 +225,7 @@ mod test { // To test that `fill_bytes` actually did something, check that the // average of `v` is not 0. let mut sum = 0.0; - for &x in v.iter() { + for &x in &v { sum += x as f64; } assert!(sum / v.len() as f64 != 0.0); diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 904c9c3adb5..57380ec2797 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -459,7 +459,7 @@ impl LintPass for ImproperCTypes { } fn check_foreign_fn(cx: &Context, decl: &ast::FnDecl) { - for input in decl.inputs.iter() { + for input in &decl.inputs { check_ty(cx, &*input.ty); } if let ast::Return(ref ret_ty) = decl.output { @@ -469,7 +469,7 @@ impl LintPass for ImproperCTypes { match it.node { ast::ItemForeignMod(ref nmod) if nmod.abi != abi::RustIntrinsic => { - for ni in nmod.items.iter() { + for ni in &nmod.items { match ni.node { ast::ForeignItemFn(ref decl, _) => check_foreign_fn(cx, &**decl), ast::ForeignItemStatic(ref t, _) => check_ty(cx, &**t) @@ -532,7 +532,7 @@ impl LintPass for BoxPointers { // If it's a struct, we also have to check the fields' types match it.node { ast::ItemStruct(ref struct_def, _) => { - for struct_field in struct_def.fields.iter() { + for struct_field in &struct_def.fields { self.check_heap_type(cx, struct_field.span, ty::node_id_to_type(cx.tcx, struct_field.node.id)); } @@ -683,7 +683,7 @@ impl LintPass for UnusedAttributes { "no_builtins", ]; - for &name in ATTRIBUTE_WHITELIST.iter() { + for &name in ATTRIBUTE_WHITELIST { if attr.check_name(name) { break; } @@ -785,7 +785,7 @@ impl 
LintPass for UnusedResults { } fn check_must_use(cx: &Context, attrs: &[ast::Attribute], sp: Span) -> bool { - for attr in attrs.iter() { + for attr in attrs { if attr.check_name("must_use") { let mut msg = "unused result which must be used".to_string(); // check for #[must_use="..."] @@ -869,7 +869,7 @@ impl LintPass for NonCamelCaseTypes { ast::ItemEnum(ref enum_definition, _) => { if has_extern_repr { return } self.check_case(cx, "type", it.ident, it.span); - for variant in enum_definition.variants.iter() { + for variant in &enum_definition.variants { self.check_case(cx, "variant", variant.node.name, variant.span); } } @@ -878,7 +878,7 @@ impl LintPass for NonCamelCaseTypes { } fn check_generics(&mut self, cx: &Context, it: &ast::Generics) { - for gen in it.ty_params.iter() { + for gen in &*it.ty_params { self.check_case(cx, "type parameter", gen.ident, gen.span); } } @@ -1048,7 +1048,7 @@ impl LintPass for NonSnakeCase { fn check_struct_def(&mut self, cx: &Context, s: &ast::StructDef, _: ast::Ident, _: &ast::Generics, _: ast::NodeId) { - for sf in s.fields.iter() { + for sf in &s.fields { if let ast::StructField_ { kind: ast::NamedField(ident, _), .. 
} = sf.node { self.check_snake_case(cx, "structure field", ident, sf.span); } @@ -1346,7 +1346,7 @@ impl UnusedMut { // avoid false warnings in match arms with multiple patterns let mut mutables = FnvHashMap(); - for p in pats.iter() { + for p in pats { pat_util::pat_bindings(&cx.tcx.def_map, &**p, |mode, id, _, path1| { let ident = path1.node; if let ast::BindByValue(ast::MutMutable) = mode { @@ -1361,7 +1361,7 @@ impl UnusedMut { } let used_mutables = cx.tcx.used_mut_nodes.borrow(); - for (_, v) in mutables.iter() { + for (_, v) in &mutables { if !v.iter().any(|e| used_mutables.contains(e)) { cx.span_lint(UNUSED_MUT, cx.tcx.map.span(v[0]), "variable does not need to be mutable"); @@ -1377,7 +1377,7 @@ impl LintPass for UnusedMut { fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { if let ast::ExprMatch(_, ref arms, _) = e.node { - for a in arms.iter() { + for a in arms { self.check_unused_mut_pat(cx, &a.pats[]) } } @@ -1394,7 +1394,7 @@ impl LintPass for UnusedMut { fn check_fn(&mut self, cx: &Context, _: visit::FnKind, decl: &ast::FnDecl, _: &ast::Block, _: Span, _: ast::NodeId) { - for a in decl.inputs.iter() { + for a in &decl.inputs { self.check_unused_mut_pat(cx, slice::ref_slice(&a.pat)); } } @@ -1871,7 +1871,7 @@ impl LintPass for UnconditionalRecursion { if cx.current_level(UNCONDITIONAL_RECURSION) != Level::Allow { let sess = cx.sess(); // offer some help to the programmer. 
- for call in self_call_spans.iter() { + for call in &self_call_spans { sess.span_note(*call, "recursive call site") } sess.span_help(sp, "a `loop` may express intention better if this is on purpose") diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index c649ff2635b..4ed61c6ffa1 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -116,7 +116,7 @@ impl LintStore { pub fn register_pass(&mut self, sess: Option<&Session>, from_plugin: bool, pass: LintPassObject) { - for &lint in pass.get_lints().iter() { + for &lint in pass.get_lints() { self.lints.push((*lint, from_plugin)); let id = LintId::of(*lint); @@ -260,7 +260,7 @@ impl LintStore { } pub fn process_command_line(&mut self, sess: &Session) { - for &(ref lint_name, level) in sess.opts.lint_opts.iter() { + for &(ref lint_name, level) in &sess.opts.lint_opts { match self.find_lint(&lint_name[], sess, None) { Some(lint_id) => self.set_level(lint_id, (level, CommandLine)), None => { @@ -340,7 +340,7 @@ macro_rules! run_lints { ($cx:expr, $f:ident, $($args:expr),*) => ({ pub fn gather_attrs(attrs: &[ast::Attribute]) -> Vec<Result<(InternedString, Level, Span), Span>> { let mut out = vec!(); - for attr in attrs.iter() { + for attr in attrs { let level = match Level::from_str(attr.name().get()) { None => continue, Some(lvl) => lvl, @@ -357,7 +357,7 @@ pub fn gather_attrs(attrs: &[ast::Attribute]) } }; - for meta in metas.iter() { + for meta in metas { out.push(match meta.node { ast::MetaWord(ref lint_name) => Ok((lint_name.clone(), level, meta.span)), _ => Err(meta.span), @@ -794,8 +794,8 @@ pub fn check_crate(tcx: &ty::ctxt, // If we missed any lints added to the session, then there's a bug somewhere // in the iteration code. 
- for (id, v) in tcx.sess.lints.borrow().iter() { - for &(lint, span, ref msg) in v.iter() { + for (id, v) in &*tcx.sess.lints.borrow() { + for &(lint, span, ref msg) in v { tcx.sess.span_bug(span, format!("unprocessed lint {} at {}: {}", lint.as_str(), tcx.map.node_to_string(*id), *msg).as_slice()) diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 7b71120ba64..15229b6618f 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -162,7 +162,7 @@ impl<'a> CrateReader<'a> { dump_crates(&self.sess.cstore); } - for &(ref name, kind) in self.sess.opts.libs.iter() { + for &(ref name, kind) in &self.sess.opts.libs { register_native_lib(self.sess, None, name.clone(), kind); } } @@ -235,7 +235,7 @@ impl<'a> CrateReader<'a> { None }) .collect::<Vec<&ast::Attribute>>(); - for m in link_args.iter() { + for m in &link_args { match m.value_str() { Some(linkarg) => self.sess.cstore.add_used_link_args(linkarg.get()), None => { /* fallthrough */ } @@ -250,7 +250,7 @@ impl<'a> CrateReader<'a> { None }) .collect::<Vec<&ast::Attribute>>(); - for m in link_args.iter() { + for m in &link_args { match m.meta_item_list() { Some(items) => { let kind = items.iter().find(|k| { diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index 1295970d667..619cfc1b52c 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -382,7 +382,7 @@ pub fn get_stability(cstore: &cstore::CStore, pub fn is_staged_api(cstore: &cstore::CStore, def: ast::DefId) -> bool { let cdata = cstore.get_crate_data(def.krate); let attrs = decoder::get_crate_attributes(cdata.data()); - for attr in attrs.iter() { + for attr in &attrs { if attr.name().get() == "staged_api" { match attr.node.value.node { ast::MetaWord(_) => return true, _ => (/*pass*/) } } diff --git a/src/librustc/metadata/cstore.rs b/src/librustc/metadata/cstore.rs index 40242f52493..0a3e173b35e 100644 --- 
a/src/librustc/metadata/cstore.rs +++ b/src/librustc/metadata/cstore.rs @@ -113,7 +113,7 @@ impl CStore { pub fn iter_crate_data<I>(&self, mut i: I) where I: FnMut(ast::CrateNum, &crate_metadata), { - for (&k, v) in self.metas.borrow().iter() { + for (&k, v) in &*self.metas.borrow() { i(k, &**v); } } @@ -122,7 +122,7 @@ impl CStore { pub fn iter_crate_data_origins<I>(&self, mut i: I) where I: FnMut(ast::CrateNum, &crate_metadata, Option<CrateSource>), { - for (&k, v) in self.metas.borrow().iter() { + for (&k, v) in &*self.metas.borrow() { let origin = self.get_used_crate_source(k); origin.as_ref().map(|cs| { assert!(k == cs.cnum); }); i(k, &**v, origin); @@ -167,12 +167,12 @@ impl CStore { ordering: &mut Vec<ast::CrateNum>) { if ordering.contains(&cnum) { return } let meta = cstore.get_crate_data(cnum); - for (_, &dep) in meta.cnum_map.iter() { + for (_, &dep) in &meta.cnum_map { visit(cstore, dep, ordering); } ordering.push(cnum); }; - for (&num, _) in self.metas.borrow().iter() { + for (&num, _) in &*self.metas.borrow() { visit(self, num, &mut ordering); } ordering.reverse(); diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index 94fe99ff07d..3a70490771e 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -1022,7 +1022,7 @@ pub fn get_methods_if_impl(intr: Rc<IdentInterner>, }); let mut impl_methods = Vec::new(); - for impl_method_id in impl_method_ids.iter() { + for impl_method_id in &impl_method_ids { let impl_method_doc = lookup_item(impl_method_id.node, cdata.data()); let family = item_family(impl_method_doc); match family { @@ -1189,7 +1189,7 @@ fn list_crate_attributes(md: rbml::Doc, hash: &Svh, try!(write!(out, "=Crate Attributes ({})=\n", *hash)); let r = get_attributes(md); - for attr in r.iter() { + for attr in &r { try!(write!(out, "{}\n", pprust::attribute_to_string(attr))); } @@ -1232,7 +1232,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> { fn list_crate_deps(data: &[u8], 
out: &mut old_io::Writer) -> old_io::IoResult<()> { try!(write!(out, "=External Dependencies=\n")); - for dep in get_crate_deps(data).iter() { + for dep in &get_crate_deps(data) { try!(write!(out, "{} {}-{}\n", dep.cnum, dep.name, dep.hash)); } try!(write!(out, "\n")); diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index 7ad91d4d71c..117ab4c8a5a 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -288,7 +288,7 @@ fn encode_parent_item(rbml_w: &mut Encoder, id: DefId) { fn encode_struct_fields(rbml_w: &mut Encoder, fields: &[ty::field_ty], origin: DefId) { - for f in fields.iter() { + for f in fields { if f.name == special_idents::unnamed_field.name { rbml_w.start_tag(tag_item_unnamed_field); } else { @@ -316,7 +316,7 @@ fn encode_enum_variant_info(ecx: &EncodeContext, let mut i = 0; let vi = ty::enum_variants(ecx.tcx, DefId { krate: ast::LOCAL_CRATE, node: id }); - for variant in variants.iter() { + for variant in variants { let def_id = local_def(variant.node.id); index.push(entry { val: variant.node.id as i64, @@ -367,7 +367,7 @@ fn encode_path<PI: Iterator<Item=PathElem>>(rbml_w: &mut Encoder, path: PI) { let path = path.collect::<Vec<_>>(); rbml_w.start_tag(tag_path); rbml_w.wr_tagged_u32(tag_path_len, path.len() as u32); - for pe in path.iter() { + for pe in &path { let tag = match *pe { ast_map::PathMod(_) => tag_path_elem_mod, ast_map::PathName(_) => tag_path_elem_name @@ -402,8 +402,8 @@ fn encode_reexported_static_base_methods(ecx: &EncodeContext, let impl_items = ecx.tcx.impl_items.borrow(); match ecx.tcx.inherent_impls.borrow().get(&exp.def_id) { Some(implementations) => { - for base_impl_did in implementations.iter() { - for &method_did in (*impl_items)[*base_impl_did].iter() { + for base_impl_did in &**implementations { + for &method_did in &*(*impl_items)[*base_impl_did] { let impl_item = ty::impl_or_trait_item( ecx.tcx, method_did.def_id()); @@ -431,7 +431,7 @@ fn 
encode_reexported_static_trait_methods(ecx: &EncodeContext, -> bool { match ecx.tcx.trait_items_cache.borrow().get(&exp.def_id) { Some(trait_items) => { - for trait_item in trait_items.iter() { + for trait_item in &**trait_items { if let ty::MethodTraitItem(ref m) = *trait_item { encode_reexported_static_method(rbml_w, exp, @@ -517,9 +517,9 @@ fn encode_reexports(ecx: &EncodeContext, path: PathElems) { debug!("(encoding info for module) encoding reexports for {}", id); match ecx.reexports.get(&id) { - Some(ref exports) => { + Some(exports) => { debug!("(encoding info for module) found reexports for {}", id); - for exp in exports.iter() { + for exp in exports { debug!("(encoding info for module) reexport '{}' ({}/{}) for \ {}", exp.name, @@ -559,7 +559,7 @@ fn encode_info_for_mod(ecx: &EncodeContext, debug!("(encoding info for module) encoding info for module ID {}", id); // Encode info about all the module children. - for item in md.items.iter() { + for item in &md.items { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(&def_to_string(local_def(item.id))[]); rbml_w.end_tag(); @@ -665,9 +665,9 @@ fn encode_parent_sort(rbml_w: &mut Encoder, sort: char) { fn encode_provided_source(rbml_w: &mut Encoder, source_opt: Option<DefId>) { - for source in source_opt.iter() { + if let Some(source) = source_opt { rbml_w.start_tag(tag_item_method_provided_source); - let s = def_to_string(*source); + let s = def_to_string(source); rbml_w.writer.write_all(s.as_bytes()); rbml_w.end_tag(); } @@ -684,7 +684,7 @@ fn encode_info_for_struct(ecx: &EncodeContext, let mut index = Vec::new(); /* We encode both private and public fields -- need to include private fields to get the offsets right */ - for field in fields.iter() { + for field in fields { let nm = field.name; let id = field.id.node; @@ -783,7 +783,7 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder, rbml_w.wr_tagged_u64(tag_region_param_def_index, param.index as u64); - for &bound_region in param.bounds.iter() { + for 
&bound_region in &param.bounds { encode_region(ecx, rbml_w, bound_region); } @@ -911,7 +911,7 @@ fn encode_info_for_associated_type(ecx: &EncodeContext, fn encode_method_argument_names(rbml_w: &mut Encoder, decl: &ast::FnDecl) { rbml_w.start_tag(tag_method_argument_names); - for arg in decl.inputs.iter() { + for arg in &decl.inputs { rbml_w.start_tag(tag_method_argument_name); if let ast::PatIdent(_, ref path1, _) = arg.pat.node { let name = token::get_ident(path1.node); @@ -926,7 +926,7 @@ fn encode_repr_attrs(rbml_w: &mut Encoder, ecx: &EncodeContext, attrs: &[ast::Attribute]) { let mut repr_attrs = Vec::new(); - for attr in attrs.iter() { + for attr in attrs { repr_attrs.extend(attr::find_repr_attrs(ecx.tcx.sess.diagnostic(), attr).into_iter()); } @@ -962,7 +962,7 @@ fn encode_inherent_implementations(ecx: &EncodeContext, match ecx.tcx.inherent_impls.borrow().get(&def_id) { None => {} Some(implementations) => { - for &impl_def_id in implementations.iter() { + for &impl_def_id in &**implementations { rbml_w.start_tag(tag_items_data_item_inherent_impl); encode_def_id(rbml_w, impl_def_id); rbml_w.end_tag(); @@ -978,7 +978,7 @@ fn encode_extension_implementations(ecx: &EncodeContext, match ecx.tcx.trait_impls.borrow().get(&trait_def_id) { None => {} Some(implementations) => { - for &impl_def_id in implementations.borrow().iter() { + for &impl_def_id in &*implementations.borrow() { rbml_w.start_tag(tag_items_data_item_extension_impl); encode_def_id(rbml_w, impl_def_id); rbml_w.end_tag(); @@ -1091,7 +1091,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_path(rbml_w, path); // Encode all the items in this module. 
- for foreign_item in fm.items.iter() { + for foreign_item in &fm.items { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(&def_to_string(local_def(foreign_item.id))[]); rbml_w.end_tag(); @@ -1123,7 +1123,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_name(rbml_w, item.ident.name); encode_attributes(rbml_w, &item.attrs[]); encode_repr_attrs(rbml_w, ecx, &item.attrs[]); - for v in (*enum_definition).variants.iter() { + for v in &enum_definition.variants { encode_variant_id(rbml_w, local_def(v.node.id)); } encode_inlined_item(ecx, rbml_w, IIItemRef(item)); @@ -1216,7 +1216,7 @@ fn encode_info_for_item(ecx: &EncodeContext, } _ => {} } - for &item_def_id in items.iter() { + for &item_def_id in items { rbml_w.start_tag(tag_item_impl_item); match item_def_id { ty::MethodTraitItemId(item_def_id) => { @@ -1230,7 +1230,7 @@ fn encode_info_for_item(ecx: &EncodeContext, } rbml_w.end_tag(); } - for ast_trait_ref in opt_trait.iter() { + if let Some(ref ast_trait_ref) = *opt_trait { let trait_ref = ty::node_id_to_trait_ref( tcx, ast_trait_ref.ref_id); encode_trait_ref(rbml_w, ecx, &*trait_ref, tag_item_trait_ref); @@ -1314,7 +1314,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_attributes(rbml_w, &item.attrs[]); encode_visibility(rbml_w, vis); encode_stability(rbml_w, stab); - for &method_def_id in ty::trait_item_def_ids(tcx, def_id).iter() { + for &method_def_id in &*ty::trait_item_def_ids(tcx, def_id) { rbml_w.start_tag(tag_item_trait_item); match method_def_id { ty::MethodTraitItemId(method_def_id) => { @@ -1599,10 +1599,10 @@ fn encode_index<T, F>(rbml_w: &mut Encoder, index: Vec<entry<T>>, mut write_fn: rbml_w.start_tag(tag_index); let mut bucket_locs = Vec::new(); rbml_w.start_tag(tag_index_buckets); - for bucket in buckets.iter() { + for bucket in &buckets { bucket_locs.push(rbml_w.writer.tell().unwrap()); rbml_w.start_tag(tag_index_buckets_bucket); - for elt in bucket.iter() { + for elt in bucket { rbml_w.start_tag(tag_index_buckets_bucket_elt); 
assert!(elt.pos < 0xffff_ffff); { @@ -1616,7 +1616,7 @@ fn encode_index<T, F>(rbml_w: &mut Encoder, index: Vec<entry<T>>, mut write_fn: } rbml_w.end_tag(); rbml_w.start_tag(tag_index_table); - for pos in bucket_locs.iter() { + for pos in &bucket_locs { assert!(*pos < 0xffff_ffff); let wr: &mut SeekableMemWriter = rbml_w.writer; wr.write_be_u32(*pos as u32); @@ -1660,7 +1660,7 @@ fn encode_meta_item(rbml_w: &mut Encoder, mi: &ast::MetaItem) { rbml_w.start_tag(tag_meta_item_name); rbml_w.writer.write_all(name.get().as_bytes()); rbml_w.end_tag(); - for inner_item in items.iter() { + for inner_item in items { encode_meta_item(rbml_w, &**inner_item); } rbml_w.end_tag(); @@ -1670,7 +1670,7 @@ fn encode_meta_item(rbml_w: &mut Encoder, mi: &ast::MetaItem) { fn encode_attributes(rbml_w: &mut Encoder, attrs: &[ast::Attribute]) { rbml_w.start_tag(tag_attributes); - for attr in attrs.iter() { + for attr in attrs { rbml_w.start_tag(tag_attribute); rbml_w.wr_tagged_u8(tag_attribute_is_sugared_doc, attr.node.is_sugared_doc as u8); encode_meta_item(rbml_w, &*attr.node.value); @@ -1694,7 +1694,7 @@ fn encode_paren_sugar(rbml_w: &mut Encoder, paren_sugar: bool) { fn encode_associated_type_names(rbml_w: &mut Encoder, names: &[ast::Name]) { rbml_w.start_tag(tag_associated_type_names); - for &name in names.iter() { + for &name in names { rbml_w.wr_tagged_str(tag_associated_type_name, token::get_name(name).get()); } rbml_w.end_tag(); @@ -1726,7 +1726,7 @@ fn encode_crate_deps(rbml_w: &mut Encoder, cstore: &cstore::CStore) { // Sanity-check the crate numbers let mut expected_cnum = 1; - for n in deps.iter() { + for n in &deps { assert_eq!(n.cnum, expected_cnum); expected_cnum += 1; } @@ -1740,7 +1740,7 @@ fn encode_crate_deps(rbml_w: &mut Encoder, cstore: &cstore::CStore) { // but is enough to get transitive crate dependencies working. 
rbml_w.start_tag(tag_crate_deps); let r = get_ordered_deps(cstore); - for dep in r.iter() { + for dep in &r { encode_crate_dep(rbml_w, (*dep).clone()); } rbml_w.end_tag(); @@ -1749,8 +1749,8 @@ fn encode_crate_deps(rbml_w: &mut Encoder, cstore: &cstore::CStore) { fn encode_lang_items(ecx: &EncodeContext, rbml_w: &mut Encoder) { rbml_w.start_tag(tag_lang_items); - for (i, def_id) in ecx.tcx.lang_items.items() { - for id in def_id.iter() { + for (i, &def_id) in ecx.tcx.lang_items.items() { + if let Some(id) = def_id { if id.krate == ast::LOCAL_CRATE { rbml_w.start_tag(tag_lang_items_item); @@ -1773,7 +1773,7 @@ fn encode_lang_items(ecx: &EncodeContext, rbml_w: &mut Encoder) { } } - for i in ecx.tcx.lang_items.missing.iter() { + for i in &ecx.tcx.lang_items.missing { rbml_w.wr_tagged_u32(tag_lang_items_missing, *i as u32); } @@ -1817,7 +1817,7 @@ fn encode_plugin_registrar_fn(ecx: &EncodeContext, rbml_w: &mut Encoder) { fn encode_macro_defs(rbml_w: &mut Encoder, krate: &ast::Crate) { rbml_w.start_tag(tag_macro_defs); - for def in krate.exported_macros.iter() { + for def in &krate.exported_macros { rbml_w.start_tag(tag_macro_def); encode_name(rbml_w, def.ident.name); @@ -1911,7 +1911,7 @@ fn encode_misc_info(ecx: &EncodeContext, rbml_w: &mut Encoder) { rbml_w.start_tag(tag_misc_info); rbml_w.start_tag(tag_misc_info_crate_items); - for item in krate.module.items.iter() { + for item in &krate.module.items { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(&def_to_string(local_def(item.id))[]); rbml_w.end_tag(); @@ -1935,7 +1935,7 @@ fn encode_misc_info(ecx: &EncodeContext, fn encode_reachable_extern_fns(ecx: &EncodeContext, rbml_w: &mut Encoder) { rbml_w.start_tag(tag_reachable_extern_fns); - for id in ecx.reachable.iter() { + for id in ecx.reachable { if let Some(ast_map::NodeItem(i)) = ecx.tcx.map.find(*id) { if let ast::ItemFn(_, _, abi, ref generics, _) = i.node { if abi != abi::Rust && !generics.is_type_parameterized() { @@ -2150,7 +2150,7 @@ fn 
encode_metadata_inner(wr: &mut SeekableMemWriter, stats.total_bytes = rbml_w.writer.tell().unwrap(); if tcx.sess.meta_stats() { - for e in rbml_w.writer.get_ref().iter() { + for e in rbml_w.writer.get_ref() { if *e == 0 { stats.zero_bytes += 1; } diff --git a/src/librustc/metadata/filesearch.rs b/src/librustc/metadata/filesearch.rs index 26046cfb43d..6197846ec6f 100644 --- a/src/librustc/metadata/filesearch.rs +++ b/src/librustc/metadata/filesearch.rs @@ -66,7 +66,7 @@ impl<'a> FileSearch<'a> { // Try RUST_PATH if !found { let rustpath = rust_path(); - for path in rustpath.iter() { + for path in &rustpath { let tlib_path = make_rustpkg_lib_path( self.sysroot, path, self.triple); debug!("is {} in visited_dirs? {}", tlib_path.display(), @@ -243,8 +243,7 @@ pub fn rust_path() -> Vec<Path> { } cwd.pop(); } - let h = os::homedir(); - for h in h.iter() { + if let Some(h) = os::homedir() { let p = h.join(".rust"); if !env_rust_path.contains(&p) && p.exists() { env_rust_path.push(p); diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 09957f58bcc..f219bfffcb8 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -452,7 +452,7 @@ impl<'a> Context<'a> { &format!("multiple matching crates for `{}`", self.crate_name)[]); self.sess.note("candidates:"); - for lib in libraries.iter() { + for lib in &libraries { match lib.dylib { Some((ref p, _)) => { self.sess.note(&format!("path: {}", diff --git a/src/librustc/metadata/tydecode.rs b/src/librustc/metadata/tydecode.rs index ac6d2d0174c..4c0aefaf83d 100644 --- a/src/librustc/metadata/tydecode.rs +++ b/src/librustc/metadata/tydecode.rs @@ -249,7 +249,7 @@ fn parse_vec_per_param_space<'a, 'tcx, T, F>(st: &mut PState<'a, 'tcx>, F: FnMut(&mut PState<'a, 'tcx>) -> T, { let mut r = VecPerParamSpace::empty(); - for &space in subst::ParamSpace::all().iter() { + for &space in &subst::ParamSpace::all() { assert_eq!(next(st), '['); while peek(st) != ']' { r.push(space, f(st)); 
diff --git a/src/librustc/metadata/tyencode.rs b/src/librustc/metadata/tyencode.rs index 2dc334bfe95..f8081e2c309 100644 --- a/src/librustc/metadata/tyencode.rs +++ b/src/librustc/metadata/tyencode.rs @@ -97,7 +97,7 @@ pub fn enc_ty<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>, t: Ty<'t } ty::ty_tup(ref ts) => { mywrite!(w, "T["); - for t in ts.iter() { enc_ty(w, cx, *t); } + for t in ts { enc_ty(w, cx, *t); } mywrite!(w, "]"); } ty::ty_uniq(typ) => { mywrite!(w, "~"); enc_ty(w, cx, typ); } @@ -206,9 +206,9 @@ fn enc_vec_per_param_space<'a, 'tcx, T, F>(w: &mut SeekableMemWriter, mut op: F) where F: FnMut(&mut SeekableMemWriter, &ctxt<'a, 'tcx>, &T), { - for &space in subst::ParamSpace::all().iter() { + for &space in &subst::ParamSpace::all() { mywrite!(w, "["); - for t in v.get_slice(space).iter() { + for t in v.get_slice(space) { op(w, cx, t); } mywrite!(w, "]"); @@ -337,7 +337,7 @@ pub fn enc_closure_ty<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>, fn enc_fn_sig<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>, fsig: &ty::PolyFnSig<'tcx>) { mywrite!(w, "["); - for ty in fsig.0.inputs.iter() { + for ty in &fsig.0.inputs { enc_ty(w, cx, *ty); } mywrite!(w, "]"); @@ -357,7 +357,7 @@ fn enc_fn_sig<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>, } pub fn enc_builtin_bounds(w: &mut SeekableMemWriter, _cx: &ctxt, bs: &ty::BuiltinBounds) { - for bound in bs.iter() { + for bound in bs { match bound { ty::BoundSend => mywrite!(w, "S"), ty::BoundSized => mywrite!(w, "Z"), @@ -383,17 +383,17 @@ pub fn enc_bounds<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>, bs: &ty::ParamBounds<'tcx>) { enc_builtin_bounds(w, cx, &bs.builtin_bounds); - for &r in bs.region_bounds.iter() { + for &r in &bs.region_bounds { mywrite!(w, "R"); enc_region(w, cx, r); } - for tp in bs.trait_bounds.iter() { + for tp in &bs.trait_bounds { mywrite!(w, "I"); enc_trait_ref(w, cx, &*tp.0); } - for tp in bs.projection_bounds.iter() { + for tp in 
&bs.projection_bounds { mywrite!(w, "P"); enc_projection_predicate(w, cx, &tp.0); } diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index 6d9b9c23504..4130195ae40 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -766,7 +766,7 @@ fn encode_vec_per_param_space<T, F>(rbml_w: &mut Encoder, mut f: F) where F: FnMut(&mut Encoder, &T), { - for &space in subst::ParamSpace::all().iter() { + for &space in &subst::ParamSpace::all() { rbml_w.emit_from_vec(v.get_slice(space), |rbml_w, n| Ok(f(rbml_w, n))).unwrap(); } @@ -1156,14 +1156,14 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, debug!("Encoding side tables for id {}", id); - for def in tcx.def_map.borrow().get(&id).iter() { + if let Some(def) = tcx.def_map.borrow().get(&id) { rbml_w.tag(c::tag_table_def, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| (*def).encode(rbml_w).unwrap()); }) } - for &ty in tcx.node_types.borrow().get(&id).iter() { + if let Some(ty) = tcx.node_types.borrow().get(&id) { rbml_w.tag(c::tag_table_node_type, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1172,7 +1172,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, }) } - for &item_substs in tcx.item_substs.borrow().get(&id).iter() { + if let Some(item_substs) = tcx.item_substs.borrow().get(&id) { rbml_w.tag(c::tag_table_item_subst, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1181,7 +1181,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, }) } - for &fv in tcx.freevars.borrow().get(&id).iter() { + if let Some(fv) = tcx.freevars.borrow().get(&id) { rbml_w.tag(c::tag_table_freevars, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1191,7 +1191,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, }) }); - for freevar in fv.iter() { + for freevar in fv { rbml_w.tag(c::tag_table_upvar_capture_map, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ 
-1209,7 +1209,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, } let lid = ast::DefId { krate: ast::LOCAL_CRATE, node: id }; - for &type_scheme in tcx.tcache.borrow().get(&lid).iter() { + if let Some(type_scheme) = tcx.tcache.borrow().get(&lid) { rbml_w.tag(c::tag_table_tcache, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1218,7 +1218,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, }) } - for &type_param_def in tcx.ty_param_defs.borrow().get(&id).iter() { + if let Some(type_param_def) = tcx.ty_param_defs.borrow().get(&id) { rbml_w.tag(c::tag_table_param_defs, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1228,7 +1228,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, } let method_call = MethodCall::expr(id); - for &method in tcx.method_map.borrow().get(&method_call).iter() { + if let Some(method) = tcx.method_map.borrow().get(&method_call) { rbml_w.tag(c::tag_table_method_map, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1237,7 +1237,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, }) } - for &trait_ref in tcx.object_cast_map.borrow().get(&id).iter() { + if let Some(trait_ref) = tcx.object_cast_map.borrow().get(&id) { rbml_w.tag(c::tag_table_object_cast_map, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1246,11 +1246,11 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, }) } - for &adjustment in tcx.adjustments.borrow().get(&id).iter() { + if let Some(adjustment) = tcx.adjustments.borrow().get(&id) { match *adjustment { _ if ty::adjust_is_object(adjustment) => { let method_call = MethodCall::autoobject(id); - for &method in tcx.method_map.borrow().get(&method_call).iter() { + if let Some(method) = tcx.method_map.borrow().get(&method_call) { rbml_w.tag(c::tag_table_method_map, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1263,7 +1263,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, 
assert!(!ty::adjust_is_object(adjustment)); for autoderef in 0..adj.autoderefs { let method_call = MethodCall::autoderef(id, autoderef); - for &method in tcx.method_map.borrow().get(&method_call).iter() { + if let Some(method) = tcx.method_map.borrow().get(&method_call) { rbml_w.tag(c::tag_table_method_map, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1287,7 +1287,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, }) } - for &closure_type in tcx.closure_tys.borrow().get(&ast_util::local_def(id)).iter() { + if let Some(closure_type) = tcx.closure_tys.borrow().get(&ast_util::local_def(id)) { rbml_w.tag(c::tag_table_closure_tys, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { @@ -1296,11 +1296,11 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, }) } - for &&closure_kind in tcx.closure_kinds.borrow().get(&ast_util::local_def(id)).iter() { + if let Some(closure_kind) = tcx.closure_kinds.borrow().get(&ast_util::local_def(id)) { rbml_w.tag(c::tag_table_closure_kinds, |rbml_w| { rbml_w.id(id); rbml_w.tag(c::tag_table_val, |rbml_w| { - encode_closure_kind(rbml_w, closure_kind) + encode_closure_kind(rbml_w, *closure_kind) }) }) } diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs index 0a575a31ead..d39b94a202e 100644 --- a/src/librustc/middle/cfg/construct.rs +++ b/src/librustc/middle/cfg/construct.rs @@ -68,7 +68,7 @@ fn add_initial_dummy_node(g: &mut CFGGraph) -> CFGIndex { impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn block(&mut self, blk: &ast::Block, pred: CFGIndex) -> CFGIndex { let mut stmts_exit = pred; - for stmt in blk.stmts.iter() { + for stmt in &blk.stmts { stmts_exit = self.stmt(&**stmt, stmts_exit); } @@ -166,7 +166,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.pat(&*pats[0], pred) } else { let collect = self.add_dummy_node(&[]); - for pat in pats.iter() { + for pat in pats { let pat_exit = self.pat(&**pat, pred); self.add_contained_edge(pat_exit, collect); } @@ -325,7 
+325,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let expr_exit = self.add_node(expr.id, &[]); let mut cond_exit = discr_exit; - for arm in arms.iter() { + for arm in arms { cond_exit = self.add_dummy_node(&[cond_exit]); // 2 let pats_exit = self.pats_any(&arm.pats[], cond_exit); // 3 @@ -522,7 +522,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { assert!(!self.exit_map.contains_key(&id)); self.exit_map.insert(id, node); } - for &pred in preds.iter() { + for &pred in preds { self.add_contained_edge(pred, node); } node @@ -574,7 +574,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { Some(_) => { match self.tcx.def_map.borrow().get(&expr.id) { Some(&def::DefLabel(loop_id)) => { - for l in self.loop_scopes.iter() { + for l in &self.loop_scopes { if l.loop_id == loop_id { return *l; } diff --git a/src/librustc/middle/check_const.rs b/src/librustc/middle/check_const.rs index 202020a9033..925bd5b6395 100644 --- a/src/librustc/middle/check_const.rs +++ b/src/librustc/middle/check_const.rs @@ -46,7 +46,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { } ast::ItemEnum(ref enum_definition, _) => { self.inside_const(|v| { - for var in enum_definition.variants.iter() { + for var in &enum_definition.variants { if let Some(ref ex) = var.node.disr_expr { v.visit_expr(&**ex); } @@ -137,7 +137,7 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &ast::Expr) { } ast::ExprBlock(ref block) => { // Check all statements in the block - for stmt in block.stmts.iter() { + for stmt in &block.stmts { let block_span_err = |&: span| span_err!(v.tcx.sess, span, E0016, "blocks in constants are limited to items and \ diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index 6de517b2906..72551daa4e6 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -157,7 +157,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { visit::walk_expr(cx, ex); match ex.node { ast::ExprMatch(ref scrut, ref arms, source) => { - for arm in 
arms.iter() { + for arm in arms { // First, check legality of move bindings. check_legality_of_move_bindings(cx, arm.guard.is_some(), @@ -285,8 +285,8 @@ fn check_arms(cx: &MatchCheckCtxt, source: ast::MatchSource) { let mut seen = Matrix(vec![]); let mut printed_if_let_err = false; - for &(ref pats, guard) in arms.iter() { - for pat in pats.iter() { + for &(ref pats, guard) in arms { + for pat in pats { let v = vec![&**pat]; match is_useful(cx, &seen, &v[], LeaveOutWitness) { @@ -979,7 +979,7 @@ fn check_fn(cx: &mut MatchCheckCtxt, visit::walk_fn(cx, kind, decl, body, sp); - for input in decl.inputs.iter() { + for input in &decl.inputs { is_refutable(cx, &*input.pat, |pat| { span_err!(cx.tcx.sess, input.pat.span, E0006, "refutable pattern in function argument: `{}` not covered", @@ -1012,7 +1012,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, let tcx = cx.tcx; let def_map = &tcx.def_map; let mut by_ref_span = None; - for pat in pats.iter() { + for pat in pats { pat_bindings(def_map, &**pat, |bm, _, span, _path| { match bm { ast::BindByRef(_) => { @@ -1039,7 +1039,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, } }; - for pat in pats.iter() { + for pat in pats { walk_pat(&**pat, |p| { if pat_is_binding(def_map, &*p) { match p.node { diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 00141903c7c..fa5d5227be5 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -104,7 +104,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, -> Option<&'a Expr> { fn variant_expr<'a>(variants: &'a [P<ast::Variant>], id: ast::NodeId) -> Option<&'a Expr> { - for variant in variants.iter() { + for variant in variants { if variant.node.id == id { return variant.node.disr_expr.as_ref().map(|e| &**e); } diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index 3e2e81077da..a06a57beb61 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs 
@@ -399,7 +399,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let mut orig_kills = self.kills[start.. end].to_vec(); let mut changed = false; - for &node_id in edge.data.exiting_scopes.iter() { + for &node_id in &edge.data.exiting_scopes { let opt_cfg_idx = self.nodeid_to_index.get(&node_id).map(|&i|i); match opt_cfg_idx { Some(cfg_idx) => { @@ -550,7 +550,7 @@ fn bits_to_string(words: &[uint]) -> String { // Note: this is a little endian printout of bytes. - for &word in words.iter() { + for &word in words { let mut v = word; for _ in 0..uint::BYTES { result.push(sep); diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index 3db931002d2..6bad7f59441 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -173,7 +173,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { } }; let fields = ty::lookup_struct_fields(self.tcx, id); - for pat in pats.iter() { + for pat in pats { let field_id = fields.iter() .find(|field| field.name == pat.node.ident.name).unwrap().id; self.live_symbols.insert(field_id.node); @@ -356,7 +356,7 @@ impl<'v> Visitor<'v> for LifeSeeder { self.worklist.extend(enum_def.variants.iter().map(|variant| variant.node.id)); } ast::ItemImpl(_, _, _, Some(ref _trait_ref), _, ref impl_items) => { - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref method) => { self.worklist.push(method.id); @@ -397,10 +397,10 @@ fn create_and_seed_worklist(tcx: &ty::ctxt, // depending on whether a crate is built as bin or lib, and we want // the warning to be consistent, we also seed the worklist with // exported symbols. 
- for id in exported_items.iter() { + for id in exported_items { worklist.push(*id); } - for id in reachable_symbols.iter() { + for id in reachable_symbols { worklist.push(*id); } @@ -499,8 +499,8 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> { match self.tcx.inherent_impls.borrow().get(&local_def(id)) { None => (), Some(impl_list) => { - for impl_did in impl_list.iter() { - for item_did in (*impl_items)[*impl_did].iter() { + for impl_did in &**impl_list { + for item_did in &(*impl_items)[*impl_did] { if self.live_symbols.contains(&item_did.def_id() .node) { return true; @@ -536,7 +536,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for DeadVisitor<'a, 'tcx> { } else { match item.node { ast::ItemEnum(ref enum_def, _) => { - for variant in enum_def.variants.iter() { + for variant in &enum_def.variants { if self.should_warn_about_variant(&variant.node) { self.warn_dead_code(variant.node.id, variant.span, variant.node.name, "variant"); diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index 16b7d6134c3..6d35a82d153 100644 --- a/src/librustc/middle/dependency_format.rs +++ b/src/librustc/middle/dependency_format.rs @@ -85,7 +85,7 @@ pub type Dependencies = FnvHashMap<config::CrateType, DependencyList>; pub fn calculate(tcx: &ty::ctxt) { let mut fmts = tcx.dependency_formats.borrow_mut(); - for &ty in tcx.sess.crate_types.borrow().iter() { + for &ty in &*tcx.sess.crate_types.borrow() { fmts.insert(ty, calculate_type(&tcx.sess, ty)); } tcx.sess.abort_if_errors(); @@ -148,7 +148,7 @@ fn calculate_type(sess: &session::Session, debug!("adding dylib: {}", data.name); add_library(sess, cnum, cstore::RequireDynamic, &mut formats); let deps = csearch::get_dylib_dependency_formats(&sess.cstore, cnum); - for &(depnum, style) in deps.iter() { + for &(depnum, style) in &deps { debug!("adding {:?}: {}", style, sess.cstore.get_crate_data(depnum).name.clone()); add_library(sess, depnum, style, &mut formats); diff --git a/src/librustc/middle/entry.rs 
b/src/librustc/middle/entry.rs index 417e5336249..24073848edf 100644 --- a/src/librustc/middle/entry.rs +++ b/src/librustc/middle/entry.rs @@ -139,7 +139,7 @@ fn configure_main(this: &mut EntryContext) { but you have one or more functions named 'main' that are not \ defined at the crate level. Either move the definition or \ attach the `#[main]` attribute to override this behavior."); - for &(_, span) in this.non_main_fns.iter() { + for &(_, span) in &this.non_main_fns { this.session.span_note(span, "here is a function named 'main'"); } this.session.abort_if_errors(); diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 4a0bed57433..44a816eb2f8 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -342,7 +342,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { fn walk_arg_patterns(&mut self, decl: &ast::FnDecl, body: &ast::Block) { - for arg in decl.inputs.iter() { + for arg in &decl.inputs { let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id)); let fn_body_scope = region::CodeExtent::from_node_id(body.id); @@ -372,7 +372,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { } fn consume_exprs(&mut self, exprs: &Vec<P<ast::Expr>>) { - for expr in exprs.iter() { + for expr in exprs { self.consume_expr(&**expr); } } @@ -476,7 +476,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { ast::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => { self.consume_expr(&**cond_expr); self.walk_block(&**then_blk); - for else_expr in opt_else_expr.iter() { + if let Some(ref else_expr) = *opt_else_expr { self.consume_expr(&**else_expr); } } @@ -490,7 +490,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { self.borrow_expr(&**discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant); // treatment of the discriminant is handled while walking the arms. 
- for arm in arms.iter() { + for arm in arms { let mode = self.arm_move_mode(discr_cmt.clone(), arm); let mode = mode.match_mode(); self.walk_arm(discr_cmt.clone(), arm, mode); @@ -511,11 +511,11 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { } ast::ExprInlineAsm(ref ia) => { - for &(_, ref input) in ia.inputs.iter() { + for &(_, ref input) in &ia.inputs { self.consume_expr(&**input); } - for &(_, ref output, is_rw) in ia.outputs.iter() { + for &(_, ref output, is_rw) in &ia.outputs { self.mutate_expr(expr, &**output, if is_rw { WriteAndRead } else { JustWrite }); } @@ -572,7 +572,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { } ast::ExprRet(ref opt_expr) => { - for expr in opt_expr.iter() { + if let Some(ref expr) = *opt_expr { self.consume_expr(&**expr); } } @@ -715,11 +715,11 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { fn walk_block(&mut self, blk: &ast::Block) { debug!("walk_block(blk.id={})", blk.id); - for stmt in blk.stmts.iter() { + for stmt in &blk.stmts { self.walk_stmt(&**stmt); } - for tail_expr in blk.expr.iter() { + if let Some(ref tail_expr) = blk.expr { self.consume_expr(&**tail_expr); } } @@ -729,7 +729,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { fields: &Vec<ast::Field>, opt_with: &Option<P<ast::Expr>>) { // Consume the expressions supplying values for each field. - for field in fields.iter() { + for field in fields { self.consume_expr(&*field.expr); } @@ -762,7 +762,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { }; // Consume those fields of the with expression that are needed. 
- for with_field in with_fields.iter() { + for with_field in &with_fields { if !contains_field_named(with_field, fields) { let cmt_field = self.mc.cat_field(&*with_expr, with_cmt.clone(), @@ -908,7 +908,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { match pass_args { PassArgs::ByValue => { self.consume_expr(receiver); - for &arg in rhs.iter() { + for &arg in &rhs { self.consume_expr(arg); } @@ -926,7 +926,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { let r = ty::ReScope(region::CodeExtent::from_node_id(expr.id)); let bk = ty::ImmBorrow; - for &arg in rhs.iter() { + for &arg in &rhs { self.borrow_expr(arg, r, bk, OverloadedOperator); } return true; @@ -934,18 +934,18 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm) -> TrackMatchMode<Span> { let mut mode = Unknown; - for pat in arm.pats.iter() { + for pat in &arm.pats { self.determine_pat_move_mode(discr_cmt.clone(), &**pat, &mut mode); } mode } fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm, mode: MatchMode) { - for pat in arm.pats.iter() { + for pat in &arm.pats { self.walk_pat(discr_cmt.clone(), &**pat, mode); } - for guard in arm.guard.iter() { + if let Some(ref guard) = arm.guard { self.consume_expr(&**guard); } @@ -1195,7 +1195,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { debug!("walk_captures({})", closure_expr.repr(self.tcx())); ty::with_freevars(self.tcx(), closure_expr.id, |freevars| { - for freevar in freevars.iter() { + for freevar in freevars { let id_var = freevar.def.def_id().node; let upvar_id = ty::UpvarId { var_id: id_var, closure_expr_id: closure_expr.id }; diff --git a/src/librustc/middle/infer/combine.rs b/src/librustc/middle/infer/combine.rs index 8b29ef9b880..8cb2774f7df 100644 --- a/src/librustc/middle/infer/combine.rs +++ b/src/librustc/middle/infer/combine.rs @@ -116,7 +116,7 @@ 
pub trait Combine<'tcx> : Sized { { let mut substs = subst::Substs::empty(); - for &space in subst::ParamSpace::all().iter() { + for &space in &subst::ParamSpace::all() { let a_tps = a_subst.types.get_slice(space); let b_tps = b_subst.types.get_slice(space); let tps = try!(self.tps(space, a_tps, b_tps)); @@ -129,7 +129,7 @@ pub trait Combine<'tcx> : Sized { } (&NonerasedRegions(ref a), &NonerasedRegions(ref b)) => { - for &space in subst::ParamSpace::all().iter() { + for &space in &subst::ParamSpace::all() { let a_regions = a.get_slice(space); let b_regions = b.get_slice(space); @@ -139,7 +139,7 @@ pub trait Combine<'tcx> : Sized { variances.regions.get_slice(space) } None => { - for _ in a_regions.iter() { + for _ in a_regions { invariance.push(ty::Invariant); } &invariance[] diff --git a/src/librustc/middle/infer/error_reporting.rs b/src/librustc/middle/infer/error_reporting.rs index f0174c5b0c6..2efc07a4023 100644 --- a/src/librustc/middle/infer/error_reporting.rs +++ b/src/librustc/middle/infer/error_reporting.rs @@ -170,7 +170,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { errors: &Vec<RegionResolutionError<'tcx>>) { let p_errors = self.process_errors(errors); let errors = if p_errors.is_empty() { errors } else { &p_errors }; - for error in errors.iter() { + for error in errors { match error.clone() { ConcreteFailure(origin, sub, sup) => { self.report_concrete_failure(origin, sub, sup); @@ -222,7 +222,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { let mut trace_origins = Vec::new(); let mut same_regions = Vec::new(); let mut processed_errors = Vec::new(); - for error in errors.iter() { + for error in errors { match error.clone() { ConcreteFailure(origin, sub, sup) => { debug!("processing ConcreteFailure"); @@ -257,7 +257,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { } if !same_regions.is_empty() { let common_scope_id = same_regions[0].scope_id; - for sr in same_regions.iter() { + for sr in 
&same_regions { // Since ProcessedErrors is used to reconstruct the function // declaration, we want to make sure that they are, in fact, // from the same scope @@ -796,11 +796,11 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { var_origins: &[RegionVariableOrigin], trace_origins: &[(TypeTrace<'tcx>, ty::type_err<'tcx>)], same_regions: &[SameRegions]) { - for vo in var_origins.iter() { + for vo in var_origins { self.report_inference_failure(vo.clone()); } self.give_suggestion(same_regions); - for &(ref trace, terr) in trace_origins.iter() { + for &(ref trace, terr) in trace_origins { self.report_type_error(trace.clone(), &terr); } } @@ -916,7 +916,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { let mut ty_params = self.generics.ty_params.clone(); let where_clause = self.generics.where_clause.clone(); let mut kept_lifetimes = HashSet::new(); - for sr in self.same_regions.iter() { + for sr in self.same_regions { self.cur_anon.set(0); self.offset_cur_anon(); let (anon_nums, region_names) = @@ -958,7 +958,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { // vector of string and then sort them. However, it makes the // choice of lifetime name deterministic and thus easier to test. 
let mut names = Vec::new(); - for rn in region_names.iter() { + for rn in region_names { let lt_name = token::get_name(*rn).get().to_string(); names.push(lt_name); } @@ -973,7 +973,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { -> (HashSet<u32>, HashSet<ast::Name>) { let mut anon_nums = HashSet::new(); let mut region_names = HashSet::new(); - for br in same_regions.regions.iter() { + for br in &same_regions.regions { match *br { ty::BrAnon(i) => { anon_nums.insert(i); @@ -989,8 +989,8 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { fn extract_all_region_names(&self) -> HashSet<ast::Name> { let mut all_region_names = HashSet::new(); - for sr in self.same_regions.iter() { - for br in sr.regions.iter() { + for sr in self.same_regions { + for br in &sr.regions { match *br { ty::BrNamed(_, name) => { all_region_names.insert(name); @@ -1123,11 +1123,11 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { where_clause: ast::WhereClause) -> ast::Generics { let mut lifetimes = Vec::new(); - for lt in add.iter() { + for lt in add { lifetimes.push(ast::LifetimeDef { lifetime: *lt, bounds: Vec::new() }); } - for lt in generics.lifetimes.iter() { + for lt in &generics.lifetimes { if keep.contains(<.lifetime.name) || !remove.contains(<.lifetime.name) { lifetimes.push((*lt).clone()); @@ -1147,7 +1147,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { region_names: &HashSet<ast::Name>) -> Vec<ast::Arg> { let mut new_inputs = Vec::new(); - for arg in inputs.iter() { + for arg in inputs { let new_ty = self.rebuild_arg_ty_or_output(&*arg.ty, lifetime, anon_nums, region_names); let possibly_new_arg = ast::Arg { @@ -1729,7 +1729,7 @@ struct LifeGiver { impl LifeGiver { fn with_taken(taken: &[ast::LifetimeDef]) -> LifeGiver { let mut taken_ = HashSet::new(); - for lt in taken.iter() { + for lt in taken { let lt_name = token::get_name(lt.lifetime.name).get().to_string(); taken_.insert(lt_name); } diff --git a/src/librustc/middle/infer/higher_ranked/mod.rs b/src/librustc/middle/infer/higher_ranked/mod.rs index 
0df84323ae5..e4eecd919c8 100644 --- a/src/librustc/middle/infer/higher_ranked/mod.rs +++ b/src/librustc/middle/infer/higher_ranked/mod.rs @@ -176,7 +176,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C // in both A and B. Replace the variable with the "first" // bound region from A that we find it to be associated // with. - for (a_br, a_r) in a_map.iter() { + for (a_br, a_r) in a_map { if tainted.iter().any(|x| x == a_r) { debug!("generalize_region(r0={:?}): \ replacing with {:?}, tainted={:?}", @@ -258,7 +258,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C let mut a_r = None; let mut b_r = None; let mut only_new_vars = true; - for r in tainted.iter() { + for r in &tainted { if is_var_in_set(a_vars, *r) { if a_r.is_some() { return fresh_bound_variable(infcx, debruijn); @@ -315,7 +315,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C a_map: &FnvHashMap<ty::BoundRegion, ty::Region>, r: ty::Region) -> ty::Region { - for (a_br, a_r) in a_map.iter() { + for (a_br, a_r) in a_map { if *a_r == r { return ty::ReLateBound(ty::DebruijnIndex::new(1), *a_br); } @@ -497,9 +497,9 @@ pub fn leak_check<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, skol_map.repr(infcx.tcx)); let new_vars = infcx.region_vars_confined_to_snapshot(snapshot); - for (&skol_br, &skol) in skol_map.iter() { + for (&skol_br, &skol) in skol_map { let tainted = infcx.tainted_regions(snapshot, skol); - for &tainted_region in tainted.iter() { + for &tainted_region in &tainted { // Each skolemized should only be relatable to itself // or new variables: match tainted_region { diff --git a/src/librustc/middle/infer/mod.rs b/src/librustc/middle/infer/mod.rs index 1665966a5e5..f8dae3e92da 100644 --- a/src/librustc/middle/infer/mod.rs +++ b/src/librustc/middle/infer/mod.rs @@ -998,8 +998,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty), error_str)[]); - for err in err.iter() { - ty::note_and_explain_type_err(self.tcx, *err) + if let Some(err) = err { + 
ty::note_and_explain_type_err(self.tcx, err) } } } diff --git a/src/librustc/middle/infer/region_inference/mod.rs b/src/librustc/middle/infer/region_inference/mod.rs index 8952452cb4b..919ea0a2520 100644 --- a/src/librustc/middle/infer/region_inference/mod.rs +++ b/src/librustc/middle/infer/region_inference/mod.rs @@ -667,7 +667,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { a, b); } VerifyGenericBound(_, _, a, ref bs) => { - for &b in bs.iter() { + for &b in bs { consider_adding_bidirectional_edges( &mut result_set, r, a, b); @@ -1200,7 +1200,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { errors: &mut Vec<RegionResolutionError<'tcx>>) { let mut reg_reg_dups = FnvHashSet(); - for verify in self.verifys.borrow().iter() { + for verify in &*self.verifys.borrow() { match *verify { VerifyRegSubReg(ref origin, sub, sup) => { if self.is_subregion_of(sub, sup) { @@ -1333,7 +1333,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { } let dummy_idx = graph.add_node(()); - for (constraint, _) in constraints.iter() { + for (constraint, _) in &*constraints { match *constraint { ConstrainVarSubVar(a_id, b_id) => { graph.add_edge(NodeIndex(a_id.index as uint), @@ -1393,8 +1393,8 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { lower_bounds.sort_by(|a, b| { free_regions_first(a, b) }); upper_bounds.sort_by(|a, b| { free_regions_first(a, b) }); - for lower_bound in lower_bounds.iter() { - for upper_bound in upper_bounds.iter() { + for lower_bound in &lower_bounds { + for upper_bound in &upper_bounds { if !self.is_subregion_of(lower_bound.region, upper_bound.region) { errors.push(SubSupConflict( @@ -1435,8 +1435,8 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { return; } - for upper_bound_1 in upper_bounds.iter() { - for upper_bound_2 in upper_bounds.iter() { + for upper_bound_1 in &upper_bounds { + for upper_bound_2 in &upper_bounds { match self.glb_concrete_regions(upper_bound_1.region, upper_bound_2.region) { Ok(_) => {} @@ -1554,7 +1554,7 @@ impl<'a, 'tcx> 
RegionVarBindings<'a, 'tcx> { changed = false; iteration += 1; debug!("---- {} Iteration {}{}", "#", tag, iteration); - for (constraint, _) in self.constraints.borrow().iter() { + for (constraint, _) in &*self.constraints.borrow() { let edge_changed = body(constraint); if edge_changed { debug!("Updated due to constraint {}", diff --git a/src/librustc/middle/infer/type_variable.rs b/src/librustc/middle/infer/type_variable.rs index 65061a29b78..9b8a4a84412 100644 --- a/src/librustc/middle/infer/type_variable.rs +++ b/src/librustc/middle/infer/type_variable.rs @@ -105,7 +105,7 @@ impl<'tcx> TypeVariableTable<'tcx> { already instantiated") }; - for &(dir, vid) in relations.iter() { + for &(dir, vid) in &relations { stack.push((ty, dir, vid)); } @@ -165,7 +165,7 @@ impl<'tcx> TypeVariableTable<'tcx> { let mut escaping_types = Vec::new(); let actions_since_snapshot = self.values.actions_since_snapshot(&s.snapshot); debug!("actions_since_snapshot.len() = {}", actions_since_snapshot.len()); - for action in actions_since_snapshot.iter() { + for action in actions_since_snapshot { match *action { sv::UndoLog::NewElem(index) => { // if any new variables were created during the diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 29a615f2b40..d9b90c1935a 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -120,7 +120,7 @@ impl LanguageItems { (self.fn_once_trait(), ty::FnOnceClosureKind), ]; - for &(opt_def_id, kind) in def_id_kinds.iter() { + for &(opt_def_id, kind) in &def_id_kinds { if Some(id) == opt_def_id { return Some(kind); } @@ -217,7 +217,7 @@ impl<'a> LanguageItemCollector<'a> { } pub fn extract(attrs: &[ast::Attribute]) -> Option<InternedString> { - for attribute in attrs.iter() { + for attribute in attrs { match attribute.value_str() { Some(ref value) if attribute.check_name("lang") => { return Some(value.clone()); diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs 
index cb0157fed87..e40e04bdee8 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -378,7 +378,7 @@ fn visit_fn(ir: &mut IrMaps, debug!("creating fn_maps: {:?}", &fn_maps as *const IrMaps); - for arg in decl.inputs.iter() { + for arg in &decl.inputs { pat_util::pat_bindings(&ir.tcx.def_map, &*arg.pat, |_bm, arg_id, _x, path1| { @@ -427,7 +427,7 @@ fn visit_local(ir: &mut IrMaps, local: &ast::Local) { } fn visit_arm(ir: &mut IrMaps, arm: &ast::Arm) { - for pat in arm.pats.iter() { + for pat in &arm.pats { pat_util::pat_bindings(&ir.tcx.def_map, &**pat, |bm, p_id, sp, path1| { debug!("adding local variable {} from match with bm {:?}", p_id, bm); @@ -464,7 +464,7 @@ fn visit_expr(ir: &mut IrMaps, expr: &Expr) { // construction site. let mut call_caps = Vec::new(); ty::with_freevars(ir.tcx, expr.id, |freevars| { - for fv in freevars.iter() { + for fv in freevars { if let DefLocal(rv) = fv.def { let fv_ln = ir.add_live_node(FreeVarNode(fv.span)); call_caps.push(CaptureInfo {ln: fv_ln, @@ -1049,7 +1049,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let ln = self.live_node(expr.id, expr.span); self.init_empty(ln, succ); let mut first_merge = true; - for arm in arms.iter() { + for arm in arms { let body_succ = self.propagate_through_expr(&*arm.body, succ); let guard_succ = @@ -1445,12 +1445,12 @@ fn check_expr(this: &mut Liveness, expr: &Expr) { } ast::ExprInlineAsm(ref ia) => { - for &(_, ref input) in ia.inputs.iter() { + for &(_, ref input) in &ia.inputs { this.visit_expr(&**input); } // Output operands must be lvalues - for &(_, ref out, _) in ia.outputs.iter() { + for &(_, ref out, _) in &ia.outputs { this.check_lvalue(&**out); this.visit_expr(&**out); } @@ -1590,7 +1590,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } fn warn_about_unused_args(&self, decl: &ast::FnDecl, entry_ln: LiveNode) { - for arg in decl.inputs.iter() { + for arg in &decl.inputs { pat_util::pat_bindings(&self.ir.tcx.def_map, &*arg.pat, |_bm, p_id, sp, path1| { @@ -1620,7 
+1620,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { -> bool { if !self.used_on_entry(ln, var) { let r = self.should_warn(var); - for name in r.iter() { + if let Some(name) = r { // annoying: for parameters in funcs like `fn(x: int) // {ret}`, there is only one node, so asking about @@ -1634,10 +1634,10 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { if is_assigned { self.ir.tcx.sess.add_lint(lint::builtin::UNUSED_VARIABLES, id, sp, format!("variable `{}` is assigned to, but never used", - *name)); + name)); } else { self.ir.tcx.sess.add_lint(lint::builtin::UNUSED_VARIABLES, id, sp, - format!("unused variable: `{}`", *name)); + format!("unused variable: `{}`", name)); } } true @@ -1653,9 +1653,9 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { var: Variable) { if self.live_on_exit(ln, var).is_none() { let r = self.should_warn(var); - for name in r.iter() { + if let Some(name) = r { self.ir.tcx.sess.add_lint(lint::builtin::UNUSED_ASSIGNMENTS, id, sp, - format!("value assigned to `{}` is never read", *name)); + format!("value assigned to `{}` is never read", name)); } } } diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 93daaa72843..1ae483be269 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -1208,7 +1208,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { } } Some(&def::DefConst(..)) => { - for subpat in subpats.iter() { + for subpat in subpats { try!(self.cat_pattern_(cmt.clone(), &**subpat, op)); } } @@ -1230,7 +1230,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { ast::PatStruct(_, ref field_pats, _) => { // {f1: p1, ..., fN: pN} - for fp in field_pats.iter() { + for fp in field_pats { let field_ty = try!(self.pat_ty(&*fp.node.pat)); // see (*2) let cmt_field = self.cat_field(pat, cmt.clone(), fp.node.ident.name, field_ty); try!(self.cat_pattern_(cmt_field, &*fp.node.pat, op)); @@ -1259,15 +1259,15 @@ 
impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { ast::PatVec(ref before, ref slice, ref after) => { let elt_cmt = try!(self.cat_index(pat, try!(self.deref_vec(pat, cmt)))); - for before_pat in before.iter() { + for before_pat in before { try!(self.cat_pattern_(elt_cmt.clone(), &**before_pat, op)); } - for slice_pat in slice.iter() { + if let Some(ref slice_pat) = *slice { let slice_ty = try!(self.pat_ty(&**slice_pat)); let slice_cmt = self.cat_rvalue_node(pat.id(), pat.span(), slice_ty); try!(self.cat_pattern_(slice_cmt, &**slice_pat, op)); } - for after_pat in after.iter() { + for after_pat in after { try!(self.cat_pattern_(elt_cmt.clone(), &**after_pat, op)); } } diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index b93cde4bf64..0af226de251 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -353,7 +353,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { // this properly would result in the necessity of computing *type* // reachability, which might result in a compile time loss. fn mark_destructors_reachable(&mut self) { - for (_, destructor_def_id) in self.tcx.destructor_for_type.borrow().iter() { + for (_, destructor_def_id) in &*self.tcx.destructor_for_type.borrow() { if destructor_def_id.krate == ast::LOCAL_CRATE { self.reachable_symbols.insert(destructor_def_id.node); } @@ -371,7 +371,7 @@ pub fn find_reachable(tcx: &ty::ctxt, // other crates link to us, they're going to expect to be able to // use the lang items, so we need to be sure to mark them as // exported. 
- for id in exported_items.iter() { + for id in exported_items { reachable_context.worklist.push(*id); } for (_, item) in tcx.lang_items.items() { diff --git a/src/librustc/middle/recursion_limit.rs b/src/librustc/middle/recursion_limit.rs index da83833fba3..7dcd358165c 100644 --- a/src/librustc/middle/recursion_limit.rs +++ b/src/librustc/middle/recursion_limit.rs @@ -20,7 +20,7 @@ use syntax::ast; use syntax::attr::AttrMetaMethods; pub fn update_recursion_limit(sess: &Session, krate: &ast::Crate) { - for attr in krate.attrs.iter() { + for attr in &krate.attrs { if !attr.check_name("recursion_limit") { continue; } diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 3f8b7e5a7b3..87d386d94c9 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -888,14 +888,14 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) { record_rvalue_scope(visitor, &**subexpr, blk_id); } ast::ExprStruct(_, ref fields, _) => { - for field in fields.iter() { + for field in fields { record_rvalue_scope_if_borrow_expr( visitor, &*field.expr, blk_id); } } ast::ExprVec(ref subexprs) | ast::ExprTup(ref subexprs) => { - for subexpr in subexprs.iter() { + for subexpr in subexprs { record_rvalue_scope_if_borrow_expr( visitor, &**subexpr, blk_id); } diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 9433f7b0a70..365355c4a2a 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -187,14 +187,14 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> { } fn visit_generics(&mut self, generics: &ast::Generics) { - for ty_param in generics.ty_params.iter() { + for ty_param in &*generics.ty_params { visit::walk_ty_param_bounds_helper(self, &ty_param.bounds); match ty_param.default { Some(ref ty) => self.visit_ty(&**ty), None => {} } } - for predicate in generics.where_clause.predicates.iter() { + for predicate in 
&generics.where_clause.predicates { match predicate { &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ ref bounded_ty, ref bounds, @@ -207,7 +207,7 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> { .. }) => { self.visit_lifetime_ref(lifetime); - for bound in bounds.iter() { + for bound in bounds { self.visit_lifetime_ref(bound); } } @@ -229,7 +229,7 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> { self.with(LateScope(&trait_ref.bound_lifetimes, self.scope), |old_scope, this| { this.check_lifetime_defs(old_scope, &trait_ref.bound_lifetimes); - for lifetime in trait_ref.bound_lifetimes.iter() { + for lifetime in &trait_ref.bound_lifetimes { this.visit_lifetime_def(lifetime); } this.visit_trait_ref(&trait_ref.trait_ref) @@ -408,7 +408,7 @@ impl<'a> LifetimeContext<'a> { let lifetime_i = &lifetimes[i]; let special_idents = [special_idents::static_lifetime]; - for lifetime in lifetimes.iter() { + for lifetime in lifetimes { if special_idents.iter().any(|&i| i.name == lifetime.lifetime.name) { span_err!(self.sess, lifetime.lifetime.span, E0262, "illegal lifetime parameter name: `{}`", @@ -431,7 +431,7 @@ impl<'a> LifetimeContext<'a> { // It is a soft error to shadow a lifetime within a parent scope. 
self.check_lifetime_def_for_shadowing(old_scope, &lifetime_i.lifetime); - for bound in lifetime_i.bounds.iter() { + for bound in &lifetime_i.bounds { self.resolve_lifetime_ref(bound); } } @@ -535,10 +535,10 @@ fn early_bound_lifetime_names(generics: &ast::Generics) -> Vec<ast::Name> { let mut collector = FreeLifetimeCollector { early_bound: &mut early_bound, late_bound: &mut late_bound }; - for ty_param in generics.ty_params.iter() { + for ty_param in &*generics.ty_params { visit::walk_ty_param_bounds_helper(&mut collector, &ty_param.bounds); } - for predicate in generics.where_clause.predicates.iter() { + for predicate in &generics.where_clause.predicates { match predicate { &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ref bounds, ref bounded_ty, @@ -551,7 +551,7 @@ fn early_bound_lifetime_names(generics: &ast::Generics) -> Vec<ast::Name> { ..}) => { collector.visit_lifetime_ref(lifetime); - for bound in bounds.iter() { + for bound in bounds { collector.visit_lifetime_ref(bound); } } @@ -562,11 +562,11 @@ fn early_bound_lifetime_names(generics: &ast::Generics) -> Vec<ast::Name> { // Any lifetime that either has a bound or is referenced by a // bound is early. - for lifetime_def in generics.lifetimes.iter() { + for lifetime_def in &generics.lifetimes { if !lifetime_def.bounds.is_empty() { shuffle(&mut early_bound, &mut late_bound, lifetime_def.lifetime.name); - for bound in lifetime_def.bounds.iter() { + for bound in &lifetime_def.bounds { shuffle(&mut early_bound, &mut late_bound, bound.name); } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 7b5af0ab1aa..5028a1322ca 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -148,7 +148,7 @@ impl Index { /// Construct the stability index for a crate being compiled. 
pub fn build(sess: &Session, krate: &Crate) -> Index { let mut staged_api = false; - for attr in krate.attrs.iter() { + for attr in &krate.attrs { if attr.name().get() == "staged_api" { match attr.node.value.node { ast::MetaWord(_) => { @@ -273,7 +273,7 @@ pub fn check_item(tcx: &ty::ctxt, item: &ast::Item, maybe_do_stability_check(tcx, id, item.span, cb); } ast::ItemTrait(_, _, ref supertraits, _) => { - for t in supertraits.iter() { + for t in &**supertraits { if let ast::TraitTyParamBound(ref t, _) = *t { let id = ty::trait_ref_to_def_id(tcx, &t.trait_ref); maybe_do_stability_check(tcx, id, t.trait_ref.path.span, cb); @@ -410,11 +410,11 @@ pub fn check_unused_features(sess: &Session, let mut active_lib_features: FnvHashMap<InternedString, Span> = lib_features.clone().into_iter().collect(); - for used_feature in used_lib_features.iter() { + for used_feature in used_lib_features { active_lib_features.remove(used_feature); } - for (_, &span) in active_lib_features.iter() { + for (_, &span) in &active_lib_features { sess.add_lint(lint::builtin::UNUSED_FEATURES, ast::CRATE_NODE_ID, span, diff --git a/src/librustc/middle/subst.rs b/src/librustc/middle/subst.rs index cbf65080a86..2cf8a83db9b 100644 --- a/src/librustc/middle/subst.rs +++ b/src/librustc/middle/subst.rs @@ -241,7 +241,7 @@ pub struct SeparateVecsPerParamSpace<T> { impl<T: fmt::Debug> fmt::Debug for VecPerParamSpace<T> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(write!(fmt, "VecPerParamSpace {{")); - for space in ParamSpace::all().iter() { + for space in &ParamSpace::all() { try!(write!(fmt, "{:?}: {:?}, ", *space, self.get_slice(*space))); } try!(write!(fmt, "}}")); diff --git a/src/librustc/middle/traits/doc.rs b/src/librustc/middle/traits/doc.rs index 4a01e964fd2..8ce4e38896e 100644 --- a/src/librustc/middle/traits/doc.rs +++ b/src/librustc/middle/traits/doc.rs @@ -35,7 +35,7 @@ provide an impl. 
To see what I mean, consider the body of `clone_slice`: fn clone_slice<T:Clone>(x: &[T]) -> Vec<T> { let mut v = Vec::new(); - for e in x.iter() { + for e in &x { v.push((*e).clone()); // (*) } } diff --git a/src/librustc/middle/traits/error_reporting.rs b/src/librustc/middle/traits/error_reporting.rs index 5e7168b6735..b8886fa65ba 100644 --- a/src/librustc/middle/traits/error_reporting.rs +++ b/src/librustc/middle/traits/error_reporting.rs @@ -28,7 +28,7 @@ use util::ppaux::{Repr, UserString}; pub fn report_fulfillment_errors<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, errors: &Vec<FulfillmentError<'tcx>>) { - for error in errors.iter() { + for error in errors { report_fulfillment_error(infcx, error); } } @@ -68,7 +68,7 @@ fn report_on_unimplemented<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, span: Span) -> Option<String> { let def_id = trait_ref.def_id; let mut report = None; - for item in ty::get_attrs(infcx.tcx, def_id).iter() { + for item in &*ty::get_attrs(infcx.tcx, def_id) { if item.check_name("rustc_on_unimplemented") { let err_sp = if item.meta().span == DUMMY_SP { span diff --git a/src/librustc/middle/traits/object_safety.rs b/src/librustc/middle/traits/object_safety.rs index 530190ddfc1..c88e58266a0 100644 --- a/src/librustc/middle/traits/object_safety.rs +++ b/src/librustc/middle/traits/object_safety.rs @@ -176,7 +176,7 @@ fn object_safety_violations_for_method<'tcx>(tcx: &ty::ctxt<'tcx>, // The `Self` type is erased, so it should not appear in list of // arguments or return type apart from the receiver. let ref sig = method.fty.sig; - for &input_ty in sig.0.inputs[1..].iter() { + for &input_ty in &sig.0.inputs[1..] 
{ if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) { return Some(MethodViolationCode::ReferencesSelf); } diff --git a/src/librustc/middle/traits/project.rs b/src/librustc/middle/traits/project.rs index ce66f4d5b35..3ede6bbb965 100644 --- a/src/librustc/middle/traits/project.rs +++ b/src/librustc/middle/traits/project.rs @@ -802,7 +802,7 @@ fn confirm_impl_candidate<'cx,'tcx>( let impl_items = &impl_items_map[impl_vtable.impl_def_id]; let mut impl_ty = None; - for impl_item in impl_items.iter() { + for impl_item in impl_items { let assoc_type = match impl_or_trait_items_map[impl_item.def_id()] { ty::TypeTraitItem(ref assoc_type) => assoc_type.clone(), ty::MethodTraitItem(..) => { continue; } diff --git a/src/librustc/middle/traits/select.rs b/src/librustc/middle/traits/select.rs index 106c07baaa7..91eec4e4c45 100644 --- a/src/librustc/middle/traits/select.rs +++ b/src/librustc/middle/traits/select.rs @@ -1089,7 +1089,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { debug!("assemble_candidates_from_impls(self_ty={})", self_ty.repr(self.tcx())); let all_impls = self.all_impls(obligation.predicate.def_id()); - for &impl_def_id in all_impls.iter() { + for &impl_def_id in &all_impls { self.infcx.probe(|snapshot| { let (skol_obligation_trait_pred, skol_map) = self.infcx().skolemize_late_bound_regions(&obligation.predicate, snapshot); diff --git a/src/librustc/middle/traits/util.rs b/src/librustc/middle/traits/util.rs index edeca83d569..45ce692bb07 100644 --- a/src/librustc/middle/traits/util.rs +++ b/src/librustc/middle/traits/util.rs @@ -343,7 +343,7 @@ pub fn get_vtable_index_of_object_method<'tcx>(tcx: &ty::ctxt<'tcx>, } let trait_items = ty::trait_items(tcx, bound_ref.def_id()); - for trait_item in trait_items.iter() { + for trait_item in &**trait_items { match *trait_item { ty::MethodTraitItem(_) => method_count += 1, ty::TypeTraitItem(_) => {} diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 1544db8ce4a..ec331d8f4b9 
100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -872,7 +872,7 @@ macro_rules! sty_debug_print { $(let mut $variant = total;)* - for (_, t) in tcx.interner.borrow().iter() { + for (_, t) in &*tcx.interner.borrow() { let variant = match t.sty { ty::ty_bool | ty::ty_char | ty::ty_int(..) | ty::ty_uint(..) | ty::ty_float(..) | ty::ty_str => continue, @@ -2579,7 +2579,7 @@ impl FlagComputation { &ty_trait(box TyTrait { ref principal, ref bounds }) => { let mut computation = FlagComputation::new(); computation.add_substs(principal.0.substs); - for projection_bound in bounds.projection_bounds.iter() { + for projection_bound in &bounds.projection_bounds { let mut proj_computation = FlagComputation::new(); proj_computation.add_projection_predicate(&projection_bound.0); computation.add_bound_computation(&proj_computation); @@ -2618,7 +2618,7 @@ impl FlagComputation { } fn add_tys(&mut self, tys: &[Ty]) { - for &ty in tys.iter() { + for &ty in tys { self.add_ty(ty); } } @@ -3530,7 +3530,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { // make no assumptions (other than that it cannot have an // in-scope type parameter within, which makes no sense). 
let mut tc = TC::All - TC::InteriorParam; - for bound in bounds.builtin_bounds.iter() { + for bound in &bounds.builtin_bounds { tc = tc - match bound { BoundSync | BoundSend | BoundCopy => TC::None, BoundSized => TC::Nonsized, @@ -4644,7 +4644,7 @@ pub fn stmt_node_id(s: &ast::Stmt) -> ast::NodeId { pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field]) -> uint { let mut i = 0; - for f in fields.iter() { if f.name == name { return i; } i += 1; } + for f in fields { if f.name == name { return i; } i += 1; } tcx.sess.bug(&format!( "no field named `{}` found in the list of fields `{:?}`", token::get_name(name), @@ -5468,25 +5468,25 @@ pub fn predicates<'tcx>( { let mut vec = Vec::new(); - for builtin_bound in bounds.builtin_bounds.iter() { + for builtin_bound in &bounds.builtin_bounds { match traits::trait_ref_for_builtin_bound(tcx, builtin_bound, param_ty) { Ok(trait_ref) => { vec.push(trait_ref.as_predicate()); } Err(ErrorReported) => { } } } - for ®ion_bound in bounds.region_bounds.iter() { + for ®ion_bound in &bounds.region_bounds { // account for the binder being introduced below; no need to shift `param_ty` // because, at present at least, it can only refer to early-bound regions let region_bound = ty_fold::shift_region(region_bound, 1); vec.push(ty::Binder(ty::OutlivesPredicate(param_ty, region_bound)).as_predicate()); } - for bound_trait_ref in bounds.trait_bounds.iter() { + for bound_trait_ref in &bounds.trait_bounds { vec.push(bound_trait_ref.as_predicate()); } - for projection in bounds.projection_bounds.iter() { + for projection in &bounds.projection_bounds { vec.push(projection.as_predicate()); } @@ -5931,17 +5931,17 @@ pub fn populate_implementations_for_type_if_necessary(tcx: &ctxt, // Record the trait->implementation mappings, if applicable. 
let associated_traits = csearch::get_impl_trait(tcx, impl_def_id); - for trait_ref in associated_traits.iter() { + if let Some(ref trait_ref) = associated_traits { record_trait_implementation(tcx, trait_ref.def_id, impl_def_id); } // For any methods that use a default implementation, add them to // the map. This is a bit unfortunate. - for impl_item_def_id in impl_items.iter() { + for impl_item_def_id in &impl_items { let method_def_id = impl_item_def_id.def_id(); match impl_or_trait_item(tcx, method_def_id) { MethodTraitItem(method) => { - for &source in method.provided_source.iter() { + if let Some(source) = method.provided_source { tcx.provided_method_sources .borrow_mut() .insert(method_def_id, source); @@ -5985,11 +5985,11 @@ pub fn populate_implementations_for_trait_if_necessary( // For any methods that use a default implementation, add them to // the map. This is a bit unfortunate. - for impl_item_def_id in impl_items.iter() { + for impl_item_def_id in &impl_items { let method_def_id = impl_item_def_id.def_id(); match impl_or_trait_item(tcx, method_def_id) { MethodTraitItem(method) => { - for &source in method.provided_source.iter() { + if let Some(source) = method.provided_source { tcx.provided_method_sources .borrow_mut() .insert(method_def_id, source); @@ -6121,7 +6121,7 @@ pub fn hash_crate_independent<'tcx>(tcx: &ctxt<'tcx>, ty: Ty<'tcx>, svh: &Svh) - }; let fn_sig = |&: state: &mut SipHasher, sig: &Binder<FnSig<'tcx>>| { let sig = anonymize_late_bound_regions(tcx, sig).0; - for a in sig.inputs.iter() { helper(tcx, *a, svh, state); } + for a in &sig.inputs { helper(tcx, *a, svh, state); } if let ty::FnConverging(output) = sig.output { helper(tcx, output, svh, state); } @@ -6270,7 +6270,7 @@ pub fn construct_free_substs<'a,'tcx>( free_id: ast::NodeId, region_params: &[RegionParameterDef]) { - for r in region_params.iter() { + for r in region_params { regions.push(r.space, ty::free_region_from_def(free_id, r)); } } @@ -6278,7 +6278,7 @@ pub fn 
construct_free_substs<'a,'tcx>( fn push_types_from_defs<'tcx>(tcx: &ty::ctxt<'tcx>, types: &mut VecPerParamSpace<Ty<'tcx>>, defs: &[TypeParameterDef<'tcx>]) { - for def in defs.iter() { + for def in defs { debug!("construct_parameter_environment(): push_types_from_defs: def={:?}", def.repr(tcx)); let ty = ty::mk_param_from_def(tcx, def); @@ -6351,7 +6351,7 @@ pub fn construct_parameter_environment<'a,'tcx>( fn record_region_bounds<'tcx>(tcx: &ty::ctxt<'tcx>, predicates: &[ty::Predicate<'tcx>]) { debug!("record_region_bounds(predicates={:?})", predicates.repr(tcx)); - for predicate in predicates.iter() { + for predicate in predicates { match *predicate { Predicate::Projection(..) | Predicate::Trait(..) | @@ -6870,7 +6870,7 @@ pub fn can_type_implement_copy<'a,'tcx>(param_env: &ParameterEnvironment<'a, 'tc let did = match self_type.sty { ty::ty_struct(struct_did, substs) => { let fields = ty::struct_fields(tcx, struct_did, substs); - for field in fields.iter() { + for field in &fields { if type_moves_by_default(param_env, span, field.mt.ty) { return Err(FieldDoesNotImplementCopy(field.name)) } @@ -6879,8 +6879,8 @@ pub fn can_type_implement_copy<'a,'tcx>(param_env: &ParameterEnvironment<'a, 'tc } ty::ty_enum(enum_did, substs) => { let enum_variants = ty::enum_variants(tcx, enum_did); - for variant in enum_variants.iter() { - for variant_arg_type in variant.args.iter() { + for variant in &*enum_variants { + for variant_arg_type in &variant.args { let substd_arg_type = variant_arg_type.subst(tcx, substs); if type_moves_by_default(param_env, span, substd_arg_type) { diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index 25cca98c5fb..425c9a4c9f7 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -78,7 +78,7 @@ fn verify(sess: &Session, items: &lang_items::LanguageItems) { let mut missing = HashSet::new(); sess.cstore.iter_crate_data(|cnum, _| { - for item in 
csearch::get_missing_lang_items(&sess.cstore, cnum).iter() { + for item in &csearch::get_missing_lang_items(&sess.cstore, cnum) { missing.insert(*item); } }); diff --git a/src/librustc/plugin/build.rs b/src/librustc/plugin/build.rs index a8018662d29..110e672b70f 100644 --- a/src/librustc/plugin/build.rs +++ b/src/librustc/plugin/build.rs @@ -48,7 +48,7 @@ pub fn find_plugin_registrar(diagnostic: &diagnostic::SpanHandler, }, _ => { diagnostic.handler().err("multiple plugin registration functions found"); - for &(_, span) in finder.registrars.iter() { + for &(_, span) in &finder.registrars { diagnostic.span_note(span, "one is here"); } diagnostic.handler().abort_if_errors(); diff --git a/src/librustc/plugin/load.rs b/src/librustc/plugin/load.rs index c420d1f15b4..22edd7c691a 100644 --- a/src/librustc/plugin/load.rs +++ b/src/librustc/plugin/load.rs @@ -73,7 +73,7 @@ pub fn load_plugins(sess: &Session, krate: &ast::Crate, // We need to error on `#[macro_use] extern crate` when it isn't at the // crate root, because `$crate` won't work properly. Identify these by // spans, because the crate map isn't set up yet. 
- for item in krate.module.items.iter() { + for item in &krate.module.items { if let ast::ItemExternCrate(_) = item.node { loader.span_whitelist.insert(item.span); } @@ -82,7 +82,7 @@ pub fn load_plugins(sess: &Session, krate: &ast::Crate, visit::walk_crate(&mut loader, krate); if let Some(plugins) = addl_plugins { - for plugin in plugins.iter() { + for plugin in &plugins { loader.load_plugin(CrateOrString::Str(plugin.as_slice()), None, None, None) } @@ -107,7 +107,7 @@ impl<'a, 'v> Visitor<'v> for PluginLoader<'a> { let mut plugin_attr = None; let mut macro_selection = Some(HashSet::new()); // None => load all let mut reexport = HashSet::new(); - for attr in item.attrs.iter() { + for attr in &item.attrs { let mut used = true; match attr.name().get() { "phase" => { @@ -127,7 +127,7 @@ impl<'a, 'v> Visitor<'v> for PluginLoader<'a> { macro_selection = None; } if let (Some(sel), Some(names)) = (macro_selection.as_mut(), names) { - for name in names.iter() { + for name in names { if let ast::MetaWord(ref name) = name.node { sel.insert(name.clone()); } else { @@ -145,7 +145,7 @@ impl<'a, 'v> Visitor<'v> for PluginLoader<'a> { } }; - for name in names.iter() { + for name in names { if let ast::MetaWord(ref name) = name.node { reexport.insert(name.clone()); } else { diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index ab182dd2256..afeb123b7a5 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -306,7 +306,7 @@ macro_rules! 
options { let value = iter.next(); let option_to_lookup = key.replace("-", "_"); let mut found = false; - for &(candidate, setter, opt_type_desc, _) in $stat.iter() { + for &(candidate, setter, opt_type_desc, _) in $stat { if option_to_lookup != candidate { continue } if !setter(&mut op, value) { match (value, opt_type_desc) { @@ -830,7 +830,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let mut lint_opts = vec!(); let mut describe_lints = false; - for &level in [lint::Allow, lint::Warn, lint::Deny, lint::Forbid].iter() { + for &level in &[lint::Allow, lint::Warn, lint::Deny, lint::Forbid] { for lint_name in matches.opt_strs(level.as_str()).into_iter() { if lint_name == "help" { describe_lints = true; @@ -853,7 +853,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let mut output_types = Vec::new(); if !debugging_opts.parse_only && !no_trans { let unparsed_output_types = matches.opt_strs("emit"); - for unparsed_output_type in unparsed_output_types.iter() { + for unparsed_output_type in &unparsed_output_types { for part in unparsed_output_type.split(',') { let output_type = match part.as_slice() { "asm" => OutputTypeAssembly, @@ -923,7 +923,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { }; let mut search_paths = SearchPaths::new(); - for s in matches.opt_strs("L").iter() { + for s in &matches.opt_strs("L") { search_paths.add_path(&s[]); } @@ -997,7 +997,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { }; let mut externs = HashMap::new(); - for arg in matches.opt_strs("extern").iter() { + for arg in &matches.opt_strs("extern") { let mut parts = arg.splitn(1, '='); let name = match parts.next() { Some(s) => s, @@ -1049,7 +1049,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { pub fn parse_crate_types_from_list(list_list: Vec<String>) -> Result<Vec<CrateType>, String> { let mut crate_types: Vec<CrateType> = Vec::new(); - for 
unparsed_crate_type in list_list.iter() { + for unparsed_crate_type in &list_list { for part in unparsed_crate_type.split(',') { let new_part = match part { "lib" => default_lib_output(), diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index a433161d659..b77a70f1f5d 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -163,7 +163,7 @@ pub fn can_reach<T, S>(edges_map: &HashMap<T, Vec<T>, S>, source: T, while i < queue.len() { match edges_map.get(&queue[i]) { Some(edges) => { - for target in edges.iter() { + for target in edges { if *target == destination { return true; } diff --git a/src/librustc/util/nodemap.rs b/src/librustc/util/nodemap.rs index 7aa126371c4..8da06e63dae 100644 --- a/src/librustc/util/nodemap.rs +++ b/src/librustc/util/nodemap.rs @@ -62,7 +62,7 @@ impl Hasher for FnvHasher { impl Writer for FnvHasher { fn write(&mut self, bytes: &[u8]) { let FnvHasher(mut hash) = *self; - for byte in bytes.iter() { + for byte in bytes { hash = hash ^ (*byte as u64); hash = hash * 0x100000001b3; } diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index eb73757bf6a..458701f2dd7 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -494,11 +494,11 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, 0 }; - for t in tps[..tps.len() - num_defaults].iter() { + for t in &tps[..tps.len() - num_defaults] { strs.push(ty_to_string(cx, *t)) } - for projection in projections.iter() { + for projection in projections { strs.push(format!("{}={}", projection.projection_ty.item_name.user_string(cx), projection.ty.user_string(cx))); @@ -665,7 +665,7 @@ impl<'tcx> UserString<'tcx> for ty::TyTrait<'tcx> { components.push(tap.user_string(tcx)); // Builtin bounds. 
- for bound in bounds.builtin_bounds.iter() { + for bound in &bounds.builtin_bounds { components.push(bound.user_string(tcx)); } @@ -748,7 +748,7 @@ impl<'tcx> Repr<'tcx> for subst::RegionSubsts { impl<'tcx> Repr<'tcx> for ty::BuiltinBounds { fn repr(&self, _tcx: &ctxt) -> String { let mut res = Vec::new(); - for b in self.iter() { + for b in self { res.push(match b { ty::BoundSend => "Send".to_string(), ty::BoundSized => "Sized".to_string(), @@ -764,7 +764,7 @@ impl<'tcx> Repr<'tcx> for ty::ParamBounds<'tcx> { fn repr(&self, tcx: &ctxt<'tcx>) -> String { let mut res = Vec::new(); res.push(self.builtin_bounds.repr(tcx)); - for t in self.trait_bounds.iter() { + for t in &self.trait_bounds { res.push(t.repr(tcx)); } res.connect("+") @@ -1157,7 +1157,7 @@ impl<'tcx> UserString<'tcx> for ty::ParamBounds<'tcx> { if !s.is_empty() { result.push(s); } - for n in self.trait_bounds.iter() { + for n in &self.trait_bounds { result.push(n.user_string(tcx)); } result.connect(" + ") @@ -1173,11 +1173,11 @@ impl<'tcx> Repr<'tcx> for ty::ExistentialBounds<'tcx> { res.push(region_str); } - for bound in self.builtin_bounds.iter() { + for bound in &self.builtin_bounds { res.push(bound.user_string(tcx)); } - for projection_bound in self.projection_bounds.iter() { + for projection_bound in &self.projection_bounds { res.push(projection_bound.user_string(tcx)); } diff --git a/src/librustc_back/archive.rs b/src/librustc_back/archive.rs index ee8bc71668b..eff11a80691 100644 --- a/src/librustc_back/archive.rs +++ b/src/librustc_back/archive.rs @@ -99,7 +99,7 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str, let oslibname = format!("{}{}{}", osprefix, name, ossuffix); let unixlibname = format!("lib{}.a", name); - for path in search_paths.iter() { + for path in search_paths { debug!("looking for {} inside {:?}", name, path.display()); let test = path.join(&oslibname[]); if test.exists() { return test } @@ -244,7 +244,7 @@ impl<'a> ArchiveBuilder<'a> { // 32,768, and we leave a 
bit of extra space for the program name. static ARG_LENGTH_LIMIT: uint = 32000; - for member_name in self.members.iter() { + for member_name in &self.members { let len = member_name.as_vec().len(); // `len + 1` to account for the space that's inserted before each @@ -297,7 +297,7 @@ impl<'a> ArchiveBuilder<'a> { // all SYMDEF files as these are just magical placeholders which get // re-created when we make a new archive anyway. let files = try!(fs::readdir(loc.path())); - for file in files.iter() { + for file in &files { let filename = file.filename_str().unwrap(); if skip(filename) { continue } if filename.contains(".SYMDEF") { continue } diff --git a/src/librustc_back/rpath.rs b/src/librustc_back/rpath.rs index 47b909df5e8..311b07fc26f 100644 --- a/src/librustc_back/rpath.rs +++ b/src/librustc_back/rpath.rs @@ -51,7 +51,7 @@ pub fn get_rpath_flags<F, G>(config: RPathConfig<F, G>) -> Vec<String> where fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> { let mut ret = Vec::new(); - for rpath in rpaths.iter() { + for rpath in rpaths { ret.push(format!("-Wl,-rpath,{}", &(*rpath)[])); } return ret; @@ -63,7 +63,7 @@ fn get_rpaths<F, G>(mut config: RPathConfig<F, G>, libs: &[Path]) -> Vec<String> { debug!("output: {:?}", config.out_filename.display()); debug!("libs:"); - for libpath in libs.iter() { + for libpath in libs { debug!(" {:?}", libpath.display()); } @@ -77,7 +77,7 @@ fn get_rpaths<F, G>(mut config: RPathConfig<F, G>, libs: &[Path]) -> Vec<String> fn log_rpaths(desc: &str, rpaths: &[String]) { debug!("{} rpaths:", desc); - for rpath in rpaths.iter() { + for rpath in rpaths { debug!(" {}", *rpath); } } @@ -139,7 +139,7 @@ fn get_install_prefix_rpath<F, G>(config: RPathConfig<F, G>) -> String where fn minimize_rpaths(rpaths: &[String]) -> Vec<String> { let mut set = HashSet::new(); let mut minimized = Vec::new(); - for rpath in rpaths.iter() { + for rpath in rpaths { if set.insert(&rpath[]) { minimized.push(rpath.clone()); } diff --git 
a/src/librustc_back/sha2.rs b/src/librustc_back/sha2.rs index d99ce8b64b0..c15b4114aa7 100644 --- a/src/librustc_back/sha2.rs +++ b/src/librustc_back/sha2.rs @@ -557,7 +557,7 @@ mod tests { fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) { // Test that it works when accepting the message all at once - for t in tests.iter() { + for t in tests { sh.reset(); sh.input_str(t.input.as_slice()); let out_str = sh.result_str(); @@ -565,7 +565,7 @@ mod tests { } // Test that it works when accepting the message in pieces - for t in tests.iter() { + for t in tests { sh.reset(); let len = t.input.len(); let mut left = len; diff --git a/src/librustc_back/svh.rs b/src/librustc_back/svh.rs index 3d7adc9934f..a14f4775ec1 100644 --- a/src/librustc_back/svh.rs +++ b/src/librustc_back/svh.rs @@ -79,7 +79,7 @@ impl Svh { // avoid collisions. let mut state = SipHasher::new(); - for data in metadata.iter() { + for data in metadata { data.hash(&mut state); } @@ -97,7 +97,7 @@ impl Svh { // // We hash only the MetaItems instead of the entire Attribute // to avoid hashing the AttrId - for attr in krate.attrs.iter() { + for attr in &krate.attrs { attr.node.value.hash(&mut state); } diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index 16adccfba57..697e8a75163 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -384,7 +384,7 @@ impl Target { let paths = os::split_paths(&target_path[]); // FIXME 16351: add a sane default search path? 
- for dir in paths.iter() { + for dir in &paths { let p = dir.join(path.clone()); if p.is_file() { return load_file(&p); diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index b66419420e9..b8d736ab9ca 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -279,7 +279,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { let loan_path = owned_ptr_base_path(loan_path); let cont = self.each_in_scope_loan(scope, |loan| { let mut ret = true; - for restr_path in loan.restricted_paths.iter() { + for restr_path in &loan.restricted_paths { if **restr_path == *loan_path { if !op(loan) { ret = false; @@ -361,7 +361,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { debug!("new_loan_indices = {:?}", new_loan_indices); self.each_issued_loan(scope, |issued_loan| { - for &new_loan_index in new_loan_indices.iter() { + for &new_loan_index in &new_loan_indices { let new_loan = &self.all_loans[new_loan_index]; self.report_error_if_loans_conflict(issued_loan, new_loan); } @@ -370,7 +370,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { for (i, &x) in new_loan_indices.iter().enumerate() { let old_loan = &self.all_loans[x]; - for &y in new_loan_indices[(i+1) ..].iter() { + for &y in &new_loan_indices[(i+1) ..] 
{ let new_loan = &self.all_loans[y]; self.report_error_if_loans_conflict(old_loan, new_loan); } @@ -416,7 +416,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { } let loan2_base_path = owned_ptr_base_path_rc(&loan2.loan_path); - for restr_path in loan1.restricted_paths.iter() { + for restr_path in &loan1.restricted_paths { if *restr_path != loan2_base_path { continue; } // If new_loan is something like `x.a`, and old_loan is something like `x.b`, we would diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index 6e71da198e3..6f51ba31182 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ -204,14 +204,14 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { debug!("fragments 1 assigned: {:?}", path_lps(&assigned[])); // Second, build parents from the moved and assigned. - for m in moved.iter() { + for m in &moved { let mut p = this.path_parent(*m); while p != InvalidMovePathIndex { parents.push(p); p = this.path_parent(p); } } - for a in assigned.iter() { + for a in &assigned { let mut p = this.path_parent(*a); while p != InvalidMovePathIndex { parents.push(p); @@ -231,15 +231,15 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { debug!("fragments 3 assigned: {:?}", path_lps(&assigned[])); // Fourth, build the leftover from the moved, assigned, and parents. 
- for m in moved.iter() { + for m in &moved { let lp = this.path_loan_path(*m); add_fragment_siblings(this, tcx, &mut unmoved, lp, None); } - for a in assigned.iter() { + for a in &assigned { let lp = this.path_loan_path(*a); add_fragment_siblings(this, tcx, &mut unmoved, lp, None); } - for p in parents.iter() { + for p in &parents { let lp = this.path_loan_path(*p); add_fragment_siblings(this, tcx, &mut unmoved, lp, None); } @@ -369,7 +369,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>, let fields = ty::lookup_struct_fields(tcx, def_id); match *origin_field_name { mc::NamedField(ast_name) => { - for f in fields.iter() { + for f in &fields { if f.name == ast_name { continue; } @@ -407,7 +407,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>, match *origin_field_name { mc::NamedField(ast_name) => { let variant_arg_names = variant_info.arg_names.as_ref().unwrap(); - for variant_arg_ident in variant_arg_names.iter() { + for variant_arg_ident in variant_arg_names { if variant_arg_ident.name == ast_name { continue; } diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs index 28d02161eeb..2cd25de431e 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs @@ -67,10 +67,10 @@ pub struct GroupedMoveErrors<'tcx> { fn report_move_errors<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, errors: &Vec<MoveError<'tcx>>) { let grouped_errors = group_errors_with_same_origin(errors); - for error in grouped_errors.iter() { + for error in &grouped_errors { report_cannot_move_out_of(bccx, error.move_from.clone()); let mut is_first_note = true; - for move_to in error.move_to_places.iter() { + for move_to in &error.move_to_places { note_move_destination(bccx, move_to.span, &move_to.ident, is_first_note); is_first_note = false; @@ -81,7 +81,7 @@ fn report_move_errors<'a, 'tcx>(bccx: 
&BorrowckCtxt<'a, 'tcx>, fn group_errors_with_same_origin<'tcx>(errors: &Vec<MoveError<'tcx>>) -> Vec<GroupedMoveErrors<'tcx>> { let mut grouped_errors = Vec::new(); - for error in errors.iter() { + for error in errors { append_to_grouped_errors(&mut grouped_errors, error) } return grouped_errors; diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index ece9d61d7f2..76c431fa4c5 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -475,13 +475,13 @@ impl<'tcx> MoveData<'tcx> { self.kill_moves(assignment.path, assignment.id, dfcx_moves); } - for assignment in self.path_assignments.borrow().iter() { + for assignment in &*self.path_assignments.borrow() { self.kill_moves(assignment.path, assignment.id, dfcx_moves); } // Kill all moves related to a variable `x` when // it goes out of scope: - for path in self.paths.borrow().iter() { + for path in &*self.paths.borrow() { match path.loan_path.kind { LpVar(..) | LpUpvar(..) | LpDowncast(..) => { let kill_scope = path.loan_path.kill_scope(tcx); @@ -633,11 +633,11 @@ impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> { //! Returns the kind of a move of `loan_path` by `id`, if one exists. 
let mut ret = None; - for loan_path_index in self.move_data.path_map.borrow().get(&*loan_path).iter() { + if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) { self.dfcx_moves.each_gen_bit(id, |move_index| { let the_move = self.move_data.moves.borrow(); let the_move = (*the_move)[move_index]; - if the_move.path == **loan_path_index { + if the_move.path == *loan_path_index { ret = Some(the_move.kind); false } else { @@ -688,7 +688,7 @@ impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> { ret = false; } } else { - for &loan_path_index in opt_loan_path_index.iter() { + if let Some(loan_path_index) = opt_loan_path_index { let cont = self.move_data.each_base_path(moved_path, |p| { if p == loan_path_index { // Scenario 3: some extension of `loan_path` @@ -699,7 +699,7 @@ impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> { true } }); - if !cont { ret = false; break } + if !cont { ret = false; } } } ret diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs index 150e616b36c..8b1b156691a 100644 --- a/src/librustc_borrowck/graphviz.rs +++ b/src/librustc_borrowck/graphviz.rs @@ -56,7 +56,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { debug!("dataflow_for({:?}, id={}) {:?}", e, id, self.variants); let mut sets = "".to_string(); let mut seen_one = false; - for &variant in self.variants.iter() { + for &variant in &self.variants { if seen_one { sets.push_str(" "); } else { seen_one = true; } sets.push_str(variant.short_name()); sets.push_str(": "); diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 9e00844b7ee..e8ea349c3db 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -760,11 +760,11 @@ fn write_out_deps(sess: &Session, id: &str) { let mut out_filenames = Vec::new(); - for output_type in sess.opts.output_types.iter() { + for output_type in &sess.opts.output_types { let file = outputs.path(*output_type); match *output_type { config::OutputTypeExe => { - for output in 
sess.crate_types.borrow().iter() { + for output in &*sess.crate_types.borrow() { let p = link::filename_for_input(sess, *output, id, &file); out_filenames.push(p); @@ -800,7 +800,7 @@ fn write_out_deps(sess: &Session, .map(|fmap| escape_dep_filename(&fmap.name[])) .collect(); let mut file = try!(old_io::File::create(&deps_filename)); - for path in out_filenames.iter() { + for path in &out_filenames { try!(write!(&mut file as &mut Writer, "{}: {}\n\n", path.display(), files.connect(" "))); } diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 2eada1ff174..a8f5cfa6b3f 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -435,7 +435,7 @@ Available lint options: fn describe_debug_flags() { println!("\nAvailable debug options:\n"); - for &(name, _, opt_type_desc, desc) in config::DB_OPTIONS.iter() { + for &(name, _, opt_type_desc, desc) in config::DB_OPTIONS { let (width, extra) = match opt_type_desc { Some(..) => (21, "=val"), None => (25, "") @@ -447,7 +447,7 @@ fn describe_debug_flags() { fn describe_codegen_flags() { println!("\nAvailable codegen options:\n"); - for &(name, _, opt_type_desc, desc) in config::CG_OPTIONS.iter() { + for &(name, _, opt_type_desc, desc) in config::CG_OPTIONS { let (width, extra) = match opt_type_desc { Some(..) 
=> (21, "=val"), None => (25, "") @@ -542,7 +542,7 @@ fn print_crate_info(sess: &Session, if sess.opts.prints.len() == 0 { return false } let attrs = input.map(|input| parse_crate_attrs(sess, input)); - for req in sess.opts.prints.iter() { + for req in &sess.opts.prints { match *req { PrintRequest::Sysroot => println!("{}", sess.sysroot().display()), PrintRequest::FileNames | @@ -566,7 +566,7 @@ fn print_crate_info(sess: &Session, let crate_types = driver::collect_crate_types(sess, attrs); let metadata = driver::collect_crate_metadata(sess, attrs); *sess.crate_metadata.borrow_mut() = metadata; - for &style in crate_types.iter() { + for &style in &crate_types { let fname = link::filename_for_input(sess, style, id.as_slice(), &t_outputs.with_extension("")); @@ -645,7 +645,7 @@ pub fn monitor<F:FnOnce()+Send>(f: F) { BUG_REPORT_URL), "run with `RUST_BACKTRACE=1` for a backtrace".to_string(), ]; - for note in xs.iter() { + for note in &xs { emitter.emit(None, ¬e[], None, diagnostic::Note) } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 45ff1c4537c..20bf77190be 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -147,7 +147,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { } pub fn create_region_hierarchy(&self, rh: &RH) { - for child_rh in rh.sub.iter() { + for child_rh in rh.sub { self.create_region_hierarchy(child_rh); self.infcx.tcx.region_maps.record_encl_scope( CodeExtent::from_node_id(child_rh.id), @@ -181,7 +181,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { names: &[String]) -> Option<ast::NodeId> { assert!(idx < names.len()); - for item in m.items.iter() { + for item in &m.items { if item.ident.user_string(this.infcx.tcx) == names[idx] { return search(this, &**item, idx+1, names); } diff --git a/src/librustc_llvm/lib.rs b/src/librustc_llvm/lib.rs index a24bc6eaec3..8ec1babd4da 100644 --- a/src/librustc_llvm/lib.rs +++ b/src/librustc_llvm/lib.rs @@ -261,13 +261,13 @@ impl AttrBuilder { } pub fn apply_llfn(&self, llfn: ValueRef) { 
- for &(idx, ref attr) in self.attrs.iter() { + for &(idx, ref attr) in &self.attrs { attr.apply_llfn(idx as c_uint, llfn); } } pub fn apply_callsite(&self, callsite: ValueRef) { - for &(idx, ref attr) in self.attrs.iter() { + for &(idx, ref attr) in &self.attrs { attr.apply_callsite(idx as c_uint, callsite); } } diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 14e80c6c8ef..c398ff72f50 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -79,7 +79,7 @@ impl<'v> Visitor<'v> for ParentVisitor { // Enum variants are parented to the enum definition itself because // they inherit privacy ast::ItemEnum(ref def, _) => { - for variant in def.variants.iter() { + for variant in &def.variants { // The parent is considered the enclosing enum because the // enum will dictate the privacy visibility of this variant // instead. @@ -93,7 +93,7 @@ impl<'v> Visitor<'v> for ParentVisitor { // parent all the methods to the trait to indicate that they're // private. ast::ItemTrait(_, _, _, ref methods) if item.vis != ast::Public => { - for m in methods.iter() { + for m in methods { match *m { ast::ProvidedMethod(ref m) => { self.parents.insert(m.id, item.id); @@ -139,7 +139,7 @@ impl<'v> Visitor<'v> for ParentVisitor { // While we have the id of the struct definition, go ahead and parent // all the fields. 
- for field in s.fields.iter() { + for field in &s.fields { self.parents.insert(field.node.id, self.curparent); } visit::walk_struct_def(self, s) @@ -233,7 +233,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EmbargoVisitor<'a, 'tcx> { // Enum variants inherit from their parent, so if the enum is // public all variants are public unless they're explicitly priv ast::ItemEnum(ref def, _) if public_first => { - for variant in def.variants.iter() { + for variant in &def.variants { self.exported_items.insert(variant.node.id); } } @@ -278,7 +278,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EmbargoVisitor<'a, 'tcx> { }); if public_ty || public_trait { - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref method) => { let meth_public = @@ -299,7 +299,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EmbargoVisitor<'a, 'tcx> { // Default methods on traits are all public so long as the trait // is public ast::ItemTrait(_, _, _, ref methods) if public_first => { - for method in methods.iter() { + for method in methods { match *method { ast::ProvidedMethod(ref m) => { debug!("provided {}", m.id); @@ -359,7 +359,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EmbargoVisitor<'a, 'tcx> { // crate module gets processed as well. if self.prev_exported { assert!(self.export_map.contains_key(&id), "wut {}", id); - for export in self.export_map[id].iter() { + for export in &self.export_map[id] { if is_local(export.def_id) { self.reexports.insert(export.def_id.node); } @@ -837,7 +837,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> { match vpath.node { ast::ViewPathSimple(..) | ast::ViewPathGlob(..) 
=> {} ast::ViewPathList(ref prefix, ref list) => { - for pid in list.iter() { + for pid in list { match pid.node { ast::PathListIdent { id, name } => { debug!("privacy - ident item {}", id); @@ -898,7 +898,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> { ast::ExprStruct(_, ref fields, _) => { match ty::expr_ty(self.tcx, expr).sty { ty::ty_struct(id, _) => { - for field in (*fields).iter() { + for field in &(*fields) { self.check_field(expr.span, id, NamedField(field.ident.node)); } @@ -906,7 +906,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> { ty::ty_enum(_, _) => { match self.tcx.def_map.borrow()[expr.id].clone() { def::DefVariant(_, variant_id, _) => { - for field in fields.iter() { + for field in fields { self.check_field(expr.span, variant_id, NamedField(field.ident.node)); } @@ -971,7 +971,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> { ast::PatStruct(_, ref fields, _) => { match ty::pat_ty(self.tcx, pattern).sty { ty::ty_struct(id, _) => { - for field in fields.iter() { + for field in fields { self.check_field(pattern.span, id, NamedField(field.node.ident)); } @@ -979,7 +979,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> { ty::ty_enum(_, _) => { match self.tcx.def_map.borrow().get(&pattern.id) { Some(&def::DefVariant(_, variant_id, _)) => { - for field in fields.iter() { + for field in fields { self.check_field(pattern.span, variant_id, NamedField(field.node.ident)); } @@ -1091,7 +1091,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { check_inherited(item.span, item.vis, "visibility qualifiers have no effect on trait \ impls"); - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref m) => { check_inherited(m.span, m.pe_vis(), ""); @@ -1112,7 +1112,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { } ast::ItemEnum(ref def, _) => { - for v in def.variants.iter() { + for v in &def.variants { match v.node.vis { ast::Public => { if 
item.vis == ast::Public { @@ -1126,7 +1126,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { } ast::ItemTrait(_, _, _, ref methods) => { - for m in methods.iter() { + for m in methods { match *m { ast::ProvidedMethod(ref m) => { check_inherited(m.span, m.pe_vis(), @@ -1157,7 +1157,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { } } let check_struct = |&: def: &ast::StructDef| { - for f in def.fields.iter() { + for f in &def.fields { match f.node.kind { ast::NamedField(_, p) => check_inherited(tcx, f.span, p), ast::UnnamedField(..) => {} @@ -1167,7 +1167,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { check_inherited(tcx, item.span, item.vis); match item.node { ast::ItemImpl(_, _, _, _, _, ref impl_items) => { - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref m) => { check_inherited(tcx, m.span, m.pe_vis()); @@ -1177,12 +1177,12 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { } } ast::ItemForeignMod(ref fm) => { - for i in fm.items.iter() { + for i in &fm.items { check_inherited(tcx, i.span, i.vis); } } ast::ItemEnum(ref def, _) => { - for v in def.variants.iter() { + for v in &def.variants { check_inherited(tcx, v.span, v.node.vis); } } @@ -1190,7 +1190,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { ast::ItemStruct(ref def, _) => check_struct(&**def), ast::ItemTrait(_, _, _, ref methods) => { - for m in methods.iter() { + for m in methods { match *m { ast::RequiredMethod(..) 
=> {} ast::ProvidedMethod(ref m) => check_inherited(tcx, m.span, @@ -1302,7 +1302,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { return } - for bound in bounds.iter() { + for bound in &**bounds { self.check_ty_param_bound(bound) } } @@ -1371,7 +1371,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { match *trait_ref { None => { - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref method) => { visit::walk_method_helper(self, &**method) @@ -1400,7 +1400,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { // impl Public<Private> { ... }. Any public static // methods will be visible as `Public::foo`. let mut found_pub_static = false; - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref method) => { if method.pe_explicit_self().node == @@ -1439,15 +1439,15 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { } fn visit_generics(&mut self, generics: &ast::Generics) { - for ty_param in generics.ty_params.iter() { - for bound in ty_param.bounds.iter() { + for ty_param in &*generics.ty_params { + for bound in &*ty_param.bounds { self.check_ty_param_bound(bound) } } - for predicate in generics.where_clause.predicates.iter() { + for predicate in &generics.where_clause.predicates { match predicate { &ast::WherePredicate::BoundPredicate(ref bound_pred) => { - for bound in bound_pred.bounds.iter() { + for bound in &*bound_pred.bounds { self.check_ty_param_bound(bound) } } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 65bd83d7937..a503398d484 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -223,8 +223,8 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { token::get_name(name))[]); { let r = child.span_for_namespace(ns); - 
for sp in r.iter() { - self.session.span_note(*sp, + if let Some(sp) = r { + self.session.span_note(sp, &format!("first definition of {} `{}` here", namespace_error_to_string(duplicate_type), token::get_name(name))[]); @@ -238,7 +238,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { fn block_needs_anonymous_module(&mut self, block: &Block) -> bool { // Check each statement. - for statement in block.stmts.iter() { + for statement in &block.stmts { match statement.node { StmtDecl(ref declaration, _) => { match declaration.node { @@ -338,7 +338,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { } } - for source_item in source_items.iter() { + for source_item in source_items { let (module_path, name) = match source_item.node { PathListIdent { name, .. } => (module_path.clone(), name.name), @@ -477,7 +477,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { let module = name_bindings.get_module(); - for variant in (*enum_definition).variants.iter() { + for variant in &(*enum_definition).variants { self.build_reduced_graph_for_variant( &**variant, local_def(item.id), @@ -591,7 +591,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { }; // For each implementation item... - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { MethodImplItem(ref method) => { // Add the method to the module. @@ -675,7 +675,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { let def_id = local_def(item.id); // Add the names of all the items to the trait info. 
- for trait_item in items.iter() { + for trait_item in items { let (name, kind) = match *trait_item { ast::RequiredMethod(_) | ast::ProvidedMethod(_) => { @@ -926,7 +926,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { let trait_item_def_ids = csearch::get_trait_item_def_ids(&self.session.cstore, def_id); - for trait_item_def_id in trait_item_def_ids.iter() { + for trait_item_def_id in &trait_item_def_ids { let (trait_item_name, trait_item_kind) = csearch::get_trait_item_name_and_kind( &self.session.cstore, @@ -1082,7 +1082,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { // Add each static method to the module. let new_parent = type_module; - for method_info in methods.iter() { + for method_info in methods { let name = method_info.name; debug!("(building reduced graph for \ external crate) creating \ diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs index 97370112ab4..a239c73c110 100644 --- a/src/librustc_resolve/check_unused.rs +++ b/src/librustc_resolve/check_unused.rs @@ -136,7 +136,7 @@ impl<'a, 'b, 'v, 'tcx> Visitor<'v> for UnusedImportCheckVisitor<'a, 'b, 'tcx> { } ViewPathList(_, ref list) => { - for i in list.iter() { + for i in list { self.finalize_import(i.node.id(), i.span); } } diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index f0711903579..c10b7124218 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -1031,7 +1031,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.current_module = orig_module; build_reduced_graph::populate_module_if_necessary(self, &module_); - for (_, child_node) in module_.children.borrow().iter() { + for (_, child_node) in &*module_.children.borrow() { match child_node.get_module_if_available() { None => { // Nothing to do. 
@@ -1042,7 +1042,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - for (_, child_module) in module_.anonymous_children.borrow().iter() { + for (_, child_module) in &*module_.anonymous_children.borrow() { self.resolve_imports_for_module_subtree(child_module.clone()); } } @@ -1087,7 +1087,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { fn names_to_string(&self, names: &[Name]) -> String { let mut first = true; let mut result = String::new(); - for name in names.iter() { + for name in names { if first { first = false } else { @@ -1596,7 +1596,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Add all resolved imports from the containing module. let import_resolutions = containing_module.import_resolutions.borrow(); - for (ident, target_import_resolution) in import_resolutions.iter() { + for (ident, target_import_resolution) in &*import_resolutions { debug!("(resolving glob import) writing module resolution \ {} into `{}`", token::get_name(*ident), @@ -1657,7 +1657,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Add all children from the containing module. build_reduced_graph::populate_module_if_necessary(self, &containing_module); - for (&name, name_bindings) in containing_module.children.borrow().iter() { + for (&name, name_bindings) in &*containing_module.children.borrow() { self.merge_import_resolution(module_, containing_module.clone(), import_directive, @@ -1667,7 +1667,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } // Add external module children from the containing module. - for (&name, module) in containing_module.external_module_children.borrow().iter() { + for (&name, module) in &*containing_module.external_module_children.borrow() { let name_bindings = Rc::new(Resolver::create_name_bindings_from_module(module.clone())); self.merge_import_resolution(module_, @@ -2519,7 +2519,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Descend into children and anonymous children. 
build_reduced_graph::populate_module_if_necessary(self, &module_); - for (_, child_node) in module_.children.borrow().iter() { + for (_, child_node) in &*module_.children.borrow() { match child_node.get_module_if_available() { None => { // Continue. @@ -2530,7 +2530,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - for (_, module_) in module_.anonymous_children.borrow().iter() { + for (_, module_) in &*module_.anonymous_children.borrow() { self.report_unresolved_imports(module_.clone()); } } @@ -2609,7 +2609,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { DlDef(d @ DefLocal(_)) => { let node_id = d.def_id().node; let mut def = d; - for rib in ribs.iter() { + for rib in ribs { match rib.kind { NormalRibKind => { // Nothing to do. Continue. @@ -2680,7 +2680,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } DlDef(def @ DefTyParam(..)) | DlDef(def @ DefSelfTy(..)) => { - for rib in ribs.iter() { + for rib in ribs { match rib.kind { NormalRibKind | ClosureRibKind(..) => { // Nothing to do. Continue. @@ -2795,8 +2795,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // enum item: resolve all the variants' discrs, // then resolve the ty params ItemEnum(ref enum_def, ref generics) => { - for variant in (*enum_def).variants.iter() { - for dis_expr in variant.node.disr_expr.iter() { + for variant in &(*enum_def).variants { + if let Some(ref dis_expr) = variant.node.disr_expr { // resolve the discriminator expr // as a constant self.with_constant_rib(|this| { @@ -2863,7 +2863,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { this.resolve_type_parameter_bounds(item.id, bounds, TraitDerivation); - for trait_item in (*trait_items).iter() { + for trait_item in &(*trait_items) { // Create a new rib for the trait_item-specific type // parameters. 
// @@ -2885,7 +2885,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { this.resolve_where_clause(&ty_m.generics .where_clause); - for argument in ty_m.decl.inputs.iter() { + for argument in &ty_m.decl.inputs { this.resolve_type(&*argument.ty); } @@ -2929,7 +2929,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ItemForeignMod(ref foreign_module) => { self.with_scope(Some(name), |this| { - for foreign_item in foreign_module.items.iter() { + for foreign_item in &foreign_module.items { match foreign_item.node { ForeignItemFn(_, ref generics) => { this.with_type_parameter_rib( @@ -3075,7 +3075,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } Some(declaration) => { let mut bindings_list = HashMap::new(); - for argument in declaration.inputs.iter() { + for argument in &declaration.inputs { this.resolve_pattern(&*argument.pat, ArgumentIrrefutableMode, &mut bindings_list); @@ -3103,14 +3103,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { fn resolve_type_parameters(&mut self, type_parameters: &OwnedSlice<TyParam>) { - for type_parameter in type_parameters.iter() { + for type_parameter in &**type_parameters { self.resolve_type_parameter(type_parameter); } } fn resolve_type_parameter(&mut self, type_parameter: &TyParam) { - for bound in type_parameter.bounds.iter() { + for bound in &*type_parameter.bounds { self.resolve_type_parameter_bound(type_parameter.id, bound, TraitBoundingTypeParameter); } @@ -3124,7 +3124,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { id: NodeId, type_parameter_bounds: &OwnedSlice<TyParamBound>, reference_type: TraitReferenceType) { - for type_parameter_bound in type_parameter_bounds.iter() { + for type_parameter_bound in &**type_parameter_bounds { self.resolve_type_parameter_bound(id, type_parameter_bound, reference_type); } @@ -3193,12 +3193,12 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } fn resolve_where_clause(&mut self, where_clause: &ast::WhereClause) { - for predicate in where_clause.predicates.iter() { + for predicate in &where_clause.predicates { match predicate { 
&ast::WherePredicate::BoundPredicate(ref bound_pred) => { self.resolve_type(&*bound_pred.bounded_ty); - for bound in bound_pred.bounds.iter() { + for bound in &*bound_pred.bounds { self.resolve_type_parameter_bound(bound_pred.bounded_ty.id, bound, TraitBoundingTypeParameter); } @@ -3236,7 +3236,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { this.resolve_where_clause(&generics.where_clause); // Resolve fields. - for field in fields.iter() { + for field in fields { this.resolve_type(&*field.node.ty); } }); @@ -3320,7 +3320,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { this.resolve_type(self_type); this.with_current_self_type(self_type, |this| { - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { MethodImplItem(ref method) => { // If this is a trait impl, ensure the method @@ -3375,7 +3375,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { fn check_trait_item(&self, name: Name, span: Span) { // If there is a TraitRef in scope for an impl, then the method must be in the trait. 
- for &(did, ref trait_ref) in self.current_trait_ref.iter() { + if let Some((did, ref trait_ref)) = self.current_trait_ref { if self.trait_item_map.get(&(name, did)).is_none() { let path_str = self.path_names_to_string(&trait_ref.path); self.resolve_error(span, @@ -3442,7 +3442,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { for (i, p) in arm.pats.iter().enumerate() { let map_i = self.binding_mode_map(&**p); - for (&key, &binding_0) in map_0.iter() { + for (&key, &binding_0) in &map_0 { match map_i.get(&key) { None => { self.resolve_error( @@ -3465,7 +3465,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - for (&key, &binding) in map_i.iter() { + for (&key, &binding) in &map_i { if !map_0.contains_key(&key) { self.resolve_error( binding.span, @@ -3482,7 +3482,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.value_ribs.push(Rib::new(NormalRibKind)); let mut bindings_list = HashMap::new(); - for pattern in arm.pats.iter() { + for pattern in &arm.pats { self.resolve_pattern(&**pattern, RefutableMode, &mut bindings_list); } @@ -3513,7 +3513,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Check for imports appearing after non-item statements. 
let mut found_non_item = false; - for statement in block.stmts.iter() { + for statement in &block.stmts { if let ast::StmtDecl(ref declaration, _) = statement.node { if let ast::DeclItem(ref i) = declaration.node { match i.node { @@ -4365,7 +4365,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let mut values: Vec<uint> = Vec::new(); for rib in this.value_ribs.iter().rev() { - for (&k, _) in rib.bindings.iter() { + for (&k, _) in &rib.bindings { maybes.push(token::get_name(k)); values.push(uint::MAX); } @@ -4640,7 +4640,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { build_reduced_graph::populate_module_if_necessary(self, &search_module); { - for (_, child_names) in search_module.children.borrow().iter() { + for (_, child_names) in &*search_module.children.borrow() { let def = match child_names.def_for_namespace(TypeNS) { Some(def) => def, None => continue @@ -4656,7 +4656,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } // Look for imports. - for (_, import) in search_module.import_resolutions.borrow().iter() { + for (_, import) in &*search_module.import_resolutions.borrow() { let target = match import.target_for_namespace(TypeNS) { None => continue, Some(target) => target, @@ -4766,13 +4766,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { debug!("Children:"); build_reduced_graph::populate_module_if_necessary(self, &module_); - for (&name, _) in module_.children.borrow().iter() { + for (&name, _) in &*module_.children.borrow() { debug!("* {}", token::get_name(name)); } debug!("Import resolutions:"); let import_resolutions = module_.import_resolutions.borrow(); - for (&name, import_resolution) in import_resolutions.iter() { + for (&name, import_resolution) in &*import_resolutions { let value_repr; match import_resolution.target_for_namespace(ValueNS) { None => { value_repr = "".to_string(); } diff --git a/src/librustc_resolve/record_exports.rs b/src/librustc_resolve/record_exports.rs index 67bcf152eb7..5d025f40d32 100644 --- a/src/librustc_resolve/record_exports.rs +++ 
b/src/librustc_resolve/record_exports.rs @@ -80,7 +80,7 @@ impl<'a, 'b, 'tcx> ExportRecorder<'a, 'b, 'tcx> { self.record_exports_for_module(&*module_); build_reduced_graph::populate_module_if_necessary(self.resolver, &module_); - for (_, child_name_bindings) in module_.children.borrow().iter() { + for (_, child_name_bindings) in &*module_.children.borrow() { match child_name_bindings.get_module_if_available() { None => { // Nothing to do. @@ -91,7 +91,7 @@ impl<'a, 'b, 'tcx> ExportRecorder<'a, 'b, 'tcx> { } } - for (_, child_module) in module_.anonymous_children.borrow().iter() { + for (_, child_module) in &*module_.anonymous_children.borrow() { self.record_exports_for_module_subtree(child_module.clone()); } } @@ -133,12 +133,12 @@ impl<'a, 'b, 'tcx> ExportRecorder<'a, 'b, 'tcx> { fn add_exports_for_module(&mut self, exports: &mut Vec<Export>, module_: &Module) { - for (name, importresolution) in module_.import_resolutions.borrow().iter() { + for (name, importresolution) in &*module_.import_resolutions.borrow() { if !importresolution.is_public { continue } let xs = [TypeNS, ValueNS]; - for &ns in xs.iter() { + for &ns in &xs { match importresolution.target_for_namespace(ns) { Some(target) => { debug!("(computing exports) maybe export '{}'", diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index c97d9090441..eeb6d9fab5e 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -194,7 +194,7 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>, symbol_hasher.input_str(&link_meta.crate_name[]); symbol_hasher.input_str("-"); symbol_hasher.input_str(link_meta.crate_hash.as_str()); - for meta in tcx.sess.crate_metadata.borrow().iter() { + for meta in &*tcx.sess.crate_metadata.borrow() { symbol_hasher.input_str(&meta[]); } symbol_hasher.input_str("-"); @@ -370,7 +370,7 @@ pub fn link_binary(sess: &Session, outputs: &OutputFilenames, crate_name: &str) -> Vec<Path> { let mut out_filenames = Vec::new(); - for &crate_type in 
sess.crate_types.borrow().iter() { + for &crate_type in &*sess.crate_types.borrow() { if invalid_output_for_target(sess, crate_type) { sess.bug(&format!("invalid output type `{:?}` for target os `{}`", crate_type, sess.opts.target_triple)[]); @@ -535,7 +535,7 @@ fn link_rlib<'a>(sess: &'a Session, let mut ab = ArchiveBuilder::create(config); ab.add_file(obj_filename).unwrap(); - for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() { + for &(ref l, kind) in &*sess.cstore.get_used_libraries().borrow() { match kind { cstore::NativeStatic => { ab.add_native_library(&l[]).unwrap(); @@ -721,7 +721,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { let crates = sess.cstore.get_used_crates(cstore::RequireStatic); let mut all_native_libs = vec![]; - for &(cnum, ref path) in crates.iter() { + for &(cnum, ref path) in &crates { let ref name = sess.cstore.get_crate_data(cnum).name; let p = match *path { Some(ref p) => p.clone(), None => { @@ -746,7 +746,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { and so may need to be preserved"); } - for &(kind, ref lib) in all_native_libs.iter() { + for &(kind, ref lib) in &all_native_libs { let name = match kind { cstore::NativeStatic => "static library", cstore::NativeUnknown => "library", @@ -1133,7 +1133,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, // crates. let deps = sess.cstore.get_used_crates(cstore::RequireDynamic); - for &(cnum, _) in deps.iter() { + for &(cnum, _) in &deps { // We may not pass all crates through to the linker. Some crates may // appear statically in an existing dylib, meaning we'll pick up all the // symbols from the dylib. 
@@ -1277,7 +1277,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) { let crates = sess.cstore.get_used_crates(cstore::RequireStatic); for (cnum, _) in crates.into_iter() { let libs = csearch::get_native_libraries(&sess.cstore, cnum); - for &(kind, ref lib) in libs.iter() { + for &(kind, ref lib) in &libs { match kind { cstore::NativeUnknown => { cmd.arg(format!("-l{}", *lib)); diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index 5fcf0be4c67..1a5310bb0a8 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -34,7 +34,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, } // Make sure we actually can run LTO - for crate_type in sess.crate_types.borrow().iter() { + for crate_type in &*sess.crate_types.borrow() { match *crate_type { config::CrateTypeExecutable | config::CrateTypeStaticlib => {} _ => { diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 9b195486d5d..c54e3e02d3c 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -100,7 +100,7 @@ impl SharedEmitter { fn dump(&mut self, handler: &Handler) { let mut buffer = self.buffer.lock().unwrap(); - for diag in buffer.iter() { + for diag in &*buffer { match diag.code { Some(ref code) => { handler.emit_with_code(None, @@ -452,7 +452,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, config.no_builtins); } - for pass in config.passes.iter() { + for pass in &config.passes { let pass = CString::from_slice(pass.as_bytes()); if !llvm::LLVMRustAddPass(mpm, pass.as_ptr()) { cgcx.handler.warn(format!("unknown pass {:?}, ignoring", @@ -597,7 +597,7 @@ pub fn run_passes(sess: &Session, modules_config.emit_bc = true; } - for output_type in output_types.iter() { + for output_type in output_types { match *output_type { config::OutputTypeBitcode => { modules_config.emit_bc = true; }, config::OutputTypeLlvmAssembly => { modules_config.emit_ir = true; }, @@ -761,7 
+761,7 @@ pub fn run_passes(sess: &Session, // Otherwise, we produced it only as a temporary output, and will need // to get rid of it. let mut user_wants_bitcode = false; - for output_type in output_types.iter() { + for output_type in output_types { match *output_type { config::OutputTypeBitcode => { user_wants_bitcode = true; @@ -1015,7 +1015,7 @@ unsafe fn configure_llvm(sess: &Session) { // FIXME #21627 disable faulty FastISel on AArch64 (even for -O0) if sess.target.target.arch.as_slice() == "aarch64" { add("-fast-isel=0"); } - for arg in sess.opts.cg.llvm_args.iter() { + for arg in &sess.opts.cg.llvm_args { add(&(*arg)[]); } } diff --git a/src/librustc_trans/save/mod.rs b/src/librustc_trans/save/mod.rs index fbeaae1d1df..e522b1f3671 100644 --- a/src/librustc_trans/save/mod.rs +++ b/src/librustc_trans/save/mod.rs @@ -113,7 +113,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { error!("Mis-calculated spans for path '{}'. \ Found {} spans, expected {}. Found spans:", path_to_string(path), spans.len(), path.segments.len()); - for s in spans.iter() { + for s in &spans { let loc = self.sess.codemap().lookup_char_pos(s.lo); error!(" '{}' in {}, line {}", self.span.snippet(*s), loc.file.name, loc.line); @@ -204,7 +204,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { return; } let sub_paths = &sub_paths[..len-2]; - for &(ref span, ref qualname) in sub_paths.iter() { + for &(ref span, ref qualname) in sub_paths { self.fmt.sub_mod_ref_str(path.span, *span, &qualname[], @@ -264,13 +264,13 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { } fn process_formals(&mut self, formals: &Vec<ast::Arg>, qualname: &str) { - for arg in formals.iter() { + for arg in formals { assert!(self.collected_paths.len() == 0 && !self.collecting); self.collecting = true; self.visit_pat(&*arg.pat); self.collecting = false; let span_utils = self.span.clone(); - for &(id, ref p, _, _) in self.collected_paths.iter() { + for &(id, ref p, _, _) in &self.collected_paths { let typ = 
ppaux::ty_to_string(&self.analysis.ty_cx, (*self.analysis.ty_cx.node_types.borrow())[id]); // get the span only for the name of the variable (I hope the path is only ever a @@ -389,7 +389,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.process_formals(&method.pe_fn_decl().inputs, qualname); // walk arg and return types - for arg in method.pe_fn_decl().inputs.iter() { + for arg in &method.pe_fn_decl().inputs { self.visit_ty(&*arg.ty); } @@ -493,7 +493,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.process_formals(&decl.inputs, &qualname[]); // walk arg and return types - for arg in decl.inputs.iter() { + for arg in &decl.inputs { self.visit_ty(&*arg.ty); } @@ -580,7 +580,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { &val[]); // fields - for field in def.fields.iter() { + for field in &def.fields { self.process_struct_field_def(field, &qualname[], item.id); self.visit_ty(&*field.node.ty); } @@ -605,7 +605,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { &format!("Could not find subspan for enum {}", enum_name)[]), } - for variant in enum_definition.variants.iter() { + for variant in &enum_definition.variants { let name = get_ident(variant.node.name); let name = name.get(); let mut qualname = enum_name.clone(); @@ -623,7 +623,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { &enum_name[], &val[], item.id); - for arg in args.iter() { + for arg in args { self.visit_ty(&*arg.ty); } } @@ -642,7 +642,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { &val[], item.id); - for field in struct_def.fields.iter() { + for field in &struct_def.fields { self.process_struct_field_def(field, qualname.as_slice(), variant.node.id); self.visit_ty(&*field.node.ty); } @@ -701,7 +701,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { } self.process_generic_params(type_parameters, item.span, "", item.id); - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref method) => { visit::walk_method_helper(self, &**method) @@ -729,7 +729,7 @@ impl <'l, 'tcx> 
DxrVisitor<'l, 'tcx> { &val[]); // super-traits - for super_bound in trait_refs.iter() { + for super_bound in &**trait_refs { let trait_ref = match *super_bound { ast::TraitTyParamBound(ref trait_ref, _) => { trait_ref @@ -759,7 +759,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // walk generics and methods self.process_generic_params(generics, item.span, &qualname[], item.id); - for method in methods.iter() { + for method in methods { self.visit_trait_item(method) } } @@ -912,11 +912,11 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { _ => None }; - for field in fields.iter() { + for field in fields { match struct_def { Some(struct_def) => { let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, struct_def); - for f in fields.iter() { + for f in &fields { if generated_code(field.ident.span) { continue; } @@ -1010,10 +1010,10 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.snippet(p.span))[]); } }; - for &Spanned { node: ref field, span } in fields.iter() { + for &Spanned { node: ref field, span } in fields { let sub_span = self.span.span_for_first_ident(span); let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, struct_def); - for f in fields.iter() { + for f in fields { if f.name == field.ident.name { self.fmt.ref_str(recorder::VarRef, span, @@ -1104,7 +1104,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { let glob_map = &self.analysis.glob_map; let glob_map = glob_map.as_ref().unwrap(); if glob_map.contains_key(&item.id) { - for n in glob_map[item.id].iter() { + for n in &glob_map[item.id] { if name_string.len() > 0 { name_string.push_str(", "); } @@ -1122,7 +1122,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.write_sub_paths(path, true); } ast::ViewPathList(ref path, ref list) => { - for plid in list.iter() { + for plid in list { match plid.node { ast::PathListIdent { id, .. 
} => { match self.lookup_type_ref(id) { @@ -1208,8 +1208,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { } fn visit_generics(&mut self, generics: &ast::Generics) { - for param in generics.ty_params.iter() { - for bound in param.bounds.iter() { + for param in &*generics.ty_params { + for bound in &*param.bounds { if let ast::TraitTyParamBound(ref trait_ref, _) = *bound { self.process_trait_ref(&trait_ref.trait_ref); } @@ -1270,7 +1270,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { scope_id); // walk arg and return types - for arg in method_type.decl.inputs.iter() { + for arg in &method_type.decl.inputs { self.visit_ty(&*arg.ty); } @@ -1349,7 +1349,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { match *ty { ty::ty_struct(def_id, _) => { let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, def_id); - for f in fields.iter() { + for f in &fields { if f.name == ident.node.name { let sub_span = self.span.span_for_last_ident(ex.span); self.fmt.ref_str(recorder::VarRef, @@ -1404,7 +1404,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.process_formals(&decl.inputs, &id[]); // walk arg and return types - for arg in decl.inputs.iter() { + for arg in &decl.inputs { self.visit_ty(&*arg.ty); } @@ -1435,7 +1435,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { fn visit_arm(&mut self, arm: &ast::Arm) { assert!(self.collected_paths.len() == 0 && !self.collecting); self.collecting = true; - for pattern in arm.pats.iter() { + for pattern in &arm.pats { // collect paths from the arm's patterns self.visit_pat(&**pattern); } @@ -1443,7 +1443,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { // This is to get around borrow checking, because we need mut self to call process_path. 
let mut paths_to_process = vec![]; // process collected paths - for &(id, ref p, ref immut, ref_kind) in self.collected_paths.iter() { + for &(id, ref p, ref immut, ref_kind) in &self.collected_paths { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&id) { self.sess.span_bug(p.span, @@ -1477,7 +1477,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { *def) } } - for &(id, ref path, ref_kind) in paths_to_process.iter() { + for &(id, ref path, ref_kind) in &paths_to_process { self.process_path(id, path.span, path, ref_kind); } self.collecting = false; @@ -1508,7 +1508,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { let value = self.span.snippet(l.span); - for &(id, ref p, ref immut, _) in self.collected_paths.iter() { + for &(id, ref p, ref immut, _) in &self.collected_paths { let value = if *immut { value.to_string() } else { "<mutable>".to_string() }; let types = self.analysis.ty_cx.node_types.borrow(); let typ = ppaux::ty_to_string(&self.analysis.ty_cx, (*types)[id]); diff --git a/src/librustc_trans/trans/_match.rs b/src/librustc_trans/trans/_match.rs index b66e2770815..52fe8797592 100644 --- a/src/librustc_trans/trans/_match.rs +++ b/src/librustc_trans/trans/_match.rs @@ -366,7 +366,7 @@ impl<'a, 'p, 'blk, 'tcx> Repr<'tcx> for Match<'a, 'p, 'blk, 'tcx> { } fn has_nested_bindings(m: &[Match], col: uint) -> bool { - for br in m.iter() { + for br in m { match br.pats[col].node { ast::PatIdent(_, _, Some(_)) => return true, _ => () @@ -561,7 +561,7 @@ fn get_branches<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let tcx = bcx.tcx(); let mut found: Vec<Opt> = vec![]; - for br in m.iter() { + for br in m { let cur = br.pats[col]; let opt = match cur.node { ast::PatLit(ref l) => ConstantValue(ConstantExpr(&**l)), @@ -827,7 +827,7 @@ fn insert_lllocals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, bindings_map: &BindingsMap<'tcx>, cs: Option<cleanup::ScopeId>) -> Block<'blk, 'tcx> { - for (&ident, &binding_info) in 
bindings_map.iter() { + for (&ident, &binding_info) in bindings_map { let llval = match binding_info.trmode { // By value mut binding for a copy type: load from the ptr // into the matched value and copy to our alloca @@ -883,7 +883,7 @@ fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let val = unpack_datum!(bcx, expr::trans(bcx, guard_expr)); let val = val.to_llbool(bcx); - for (_, &binding_info) in data.bindings_map.iter() { + for (_, &binding_info) in &data.bindings_map { if let TrByCopy(llbinding) = binding_info.trmode { call_lifetime_end(bcx, llbinding); } @@ -891,7 +891,7 @@ fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, with_cond(bcx, Not(bcx, val, guard_expr.debug_loc()), |bcx| { // Guard does not match: remove all bindings from the lllocals table - for (_, &binding_info) in data.bindings_map.iter() { + for (_, &binding_info) in &data.bindings_map { call_lifetime_end(bcx, binding_info.llmatch); bcx.fcx.lllocals.borrow_mut().remove(&binding_info.id); } @@ -949,7 +949,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } None => { let data = &m[0].data; - for &(ref ident, ref value_ptr) in m[0].bound_ptrs.iter() { + for &(ref ident, ref value_ptr) in &m[0].bound_ptrs { let llmatch = data.bindings_map[*ident].llmatch; call_lifetime_start(bcx, llmatch); Store(bcx, *value_ptr, llmatch); @@ -1055,7 +1055,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, Variant(_, ref repr, _) => { let (the_kind, val_opt) = adt::trans_switch(bcx, &**repr, val); kind = the_kind; - for &tval in val_opt.iter() { test_val = tval; } + if let Some(tval) = val_opt { test_val = tval; } } SliceLengthEqual(_) | SliceLengthGreaterOrEqual(_, _) => { let (_, len) = tvec::get_base_and_len(bcx, val, left_ty); @@ -1064,7 +1064,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } } } - for o in opts.iter() { + for o in &opts { match *o { ConstantRange(_, _) => { kind = Compare; break }, 
SliceLengthGreaterOrEqual(_, _) => { kind = CompareSliceLength; break }, @@ -1410,7 +1410,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, compile_submatch(bcx, &matches[], &[discr_datum.val], &chk, has_default); let mut arm_cxs = Vec::new(); - for arm_data in arm_datas.iter() { + for arm_data in &arm_datas { let mut bcx = arm_data.bodycx; // insert bindings into the lllocals map and add cleanups @@ -1623,7 +1623,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, }); } - for inner_pat in inner.iter() { + if let Some(ref inner_pat) = *inner { bcx = bind_irrefutable_pat(bcx, &**inner_pat, val, cleanup_scope); } } @@ -1639,7 +1639,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, &*repr, vinfo.disr_val, val); - for sub_pat in sub_pats.iter() { + if let Some(ref sub_pat) = *sub_pats { for (i, &argval) in args.vals.iter().enumerate() { bcx = bind_irrefutable_pat(bcx, &*sub_pat[i], argval, cleanup_scope); @@ -1673,7 +1673,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let pat_ty = node_id_type(bcx, pat.id); let pat_repr = adt::represent_type(bcx.ccx(), pat_ty); expr::with_field_tys(tcx, pat_ty, Some(pat.id), |discr, field_tys| { - for f in fields.iter() { + for f in fields { let ix = ty::field_idx_strict(tcx, f.node.ident.name, field_tys); let fldptr = adt::trans_field_ptr(bcx, &*pat_repr, val, discr, ix); diff --git a/src/librustc_trans/trans/adt.rs b/src/librustc_trans/trans/adt.rs index 1deb07e1ba0..b99622ce911 100644 --- a/src/librustc_trans/trans/adt.rs +++ b/src/librustc_trans/trans/adt.rs @@ -285,7 +285,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // alignment of the type. 
let (_, align) = union_size_and_align(fields.as_slice()); let mut use_align = true; - for st in fields.iter() { + for st in &fields { // Get the first non-zero-sized field let field = st.fields.iter().skip(1).filter(|ty| { let t = type_of::sizing_type_of(cx, **ty); @@ -519,7 +519,7 @@ fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntTyp cx.tcx().sess.bug("range_to_inttype: found ReprPacked on an enum"); } } - for &ity in attempts.iter() { + for &ity in attempts { if bounds_usable(cx, ity, bounds) { return ity; } @@ -563,7 +563,7 @@ fn ensure_struct_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, packed: bool, scapegoat: Ty<'tcx>) { let mut offset = 0; - for &llty in fields.iter() { + for &llty in fields { // Invariant: offset < ccx.obj_size_bound() <= 1<<61 if !packed { let type_align = machine::llalign_of_min(ccx, llty); @@ -1097,7 +1097,7 @@ fn compute_struct_field_offsets<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let mut offsets = vec!(); let mut offset = 0; - for &ty in st.fields.iter() { + for &ty in &st.fields { let llty = type_of::sizing_type_of(ccx, ty); if !st.packed { let type_align = type_of::align_of(ccx, ty); diff --git a/src/librustc_trans/trans/base.rs b/src/librustc_trans/trans/base.rs index a7afa17ab72..e4077f26ba8 100644 --- a/src/librustc_trans/trans/base.rs +++ b/src/librustc_trans/trans/base.rs @@ -350,7 +350,7 @@ pub fn get_extern_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, did: ast::DefId, // don't do this then linker errors can be generated where the linker // complains that one object files has a thread local version of the // symbol and another one doesn't. 
- for attr in ty::get_attrs(ccx.tcx(), did).iter() { + for attr in &*ty::get_attrs(ccx.tcx(), did) { if attr.check_name("thread_local") { llvm::set_thread_local(c, true); } @@ -442,7 +442,7 @@ pub fn set_llvm_fn_attrs(ccx: &CrateContext, attrs: &[ast::Attribute], llfn: Val InlineNone => { /* fallthrough */ } } - for attr in attrs.iter() { + for attr in attrs { let mut used = true; match attr.name().get() { "no_stack_check" => unset_split_stack(llfn), @@ -765,7 +765,7 @@ pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>, n_variants); let next_cx = fcx.new_temp_block("enum-iter-next"); - for variant in (*variants).iter() { + for variant in &(*variants) { let variant_cx = fcx.new_temp_block( &format!("enum-iter-variant-{}", @@ -970,7 +970,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if need_invoke(bcx) { debug!("invoking {} at {:?}", bcx.val_to_string(llfn), bcx.llbb); - for &llarg in llargs.iter() { + for &llarg in llargs { debug!("arg: {}", bcx.val_to_string(llarg)); } let normal_bcx = bcx.fcx.new_temp_block("normal-return"); @@ -986,7 +986,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, return (llresult, normal_bcx); } else { debug!("calling {} at {:?}", bcx.val_to_string(llfn), bcx.llbb); - for &llarg in llargs.iter() { + for &llarg in llargs { debug!("arg: {}", bcx.val_to_string(llarg)); } @@ -1830,7 +1830,7 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, vec![ty::mk_tup(ccx.tcx(), monomorphized_arg_types)] } }; - for monomorphized_arg_type in monomorphized_arg_types.iter() { + for monomorphized_arg_type in &monomorphized_arg_types { debug!("trans_closure: monomorphized_arg_type: {}", ty_to_string(ccx.tcx(), *monomorphized_arg_type)); } @@ -1908,7 +1908,7 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // This somewhat improves single-stepping experience in debugger. 
unsafe { let llreturn = fcx.llreturn.get(); - for &llreturn in llreturn.iter() { + if let Some(llreturn) = llreturn { llvm::LLVMMoveBasicBlockAfter(llreturn, bcx.llbb); } } @@ -2109,7 +2109,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span, let avar = adt::represent_type(ccx, ty); match *avar { adt::General(_, ref variants, _) => { - for var in variants.iter() { + for var in variants { let mut size = 0; for field in var.fields.iter().skip(1) { // skip the discriminant @@ -2382,7 +2382,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { // and control visibility. pub fn trans_mod(ccx: &CrateContext, m: &ast::Mod) { let _icx = push_ctxt("trans_mod"); - for item in m.items.iter() { + for item in &m.items { trans_item(ccx, &**item); } } @@ -3161,7 +3161,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) stats.fn_stats.borrow_mut().sort_by(|&(_, insns_a), &(_, insns_b)| { insns_b.cmp(&insns_a) }); - for tuple in stats.fn_stats.borrow().iter() { + for tuple in &*stats.fn_stats.borrow() { match *tuple { (ref name, insns) => { println!("{} insns, {}", insns, *name); @@ -3170,7 +3170,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) } } if shared_ccx.sess().count_llvm_insns() { - for (k, v) in shared_ccx.stats().llvm_insns.borrow().iter() { + for (k, v) in &*shared_ccx.stats().llvm_insns.borrow() { println!("{:7} {}", *v, *k); } } diff --git a/src/librustc_trans/trans/cabi_aarch64.rs b/src/librustc_trans/trans/cabi_aarch64.rs index ce1cf54919c..5d1e6d2c9e8 100644 --- a/src/librustc_trans/trans/cabi_aarch64.rs +++ b/src/librustc_trans/trans/cabi_aarch64.rs @@ -150,7 +150,7 @@ pub fn compute_abi_info(ccx: &CrateContext, rty: Type, ret_def: bool) -> FnType { let mut arg_tys = Vec::new(); - for &aty in atys.iter() { + for &aty in atys { let ty = classify_arg_ty(ccx, aty); arg_tys.push(ty); } diff --git a/src/librustc_trans/trans/cabi_arm.rs b/src/librustc_trans/trans/cabi_arm.rs index 
ac32ce49511..50014230df6 100644 --- a/src/librustc_trans/trans/cabi_arm.rs +++ b/src/librustc_trans/trans/cabi_arm.rs @@ -186,7 +186,7 @@ pub fn compute_abi_info(ccx: &CrateContext, }; let mut arg_tys = Vec::new(); - for &aty in atys.iter() { + for &aty in atys { let ty = classify_arg_ty(ccx, aty, align_fn); arg_tys.push(ty); } diff --git a/src/librustc_trans/trans/cabi_mips.rs b/src/librustc_trans/trans/cabi_mips.rs index af7956b46be..fecd1a9013b 100644 --- a/src/librustc_trans/trans/cabi_mips.rs +++ b/src/librustc_trans/trans/cabi_mips.rs @@ -176,7 +176,7 @@ pub fn compute_abi_info(ccx: &CrateContext, let mut arg_tys = Vec::new(); let mut offset = if sret { 4 } else { 0 }; - for aty in atys.iter() { + for aty in atys { let ty = classify_arg_ty(ccx, *aty, &mut offset); arg_tys.push(ty); }; diff --git a/src/librustc_trans/trans/cabi_powerpc.rs b/src/librustc_trans/trans/cabi_powerpc.rs index c3b0026de98..9583158a0e2 100644 --- a/src/librustc_trans/trans/cabi_powerpc.rs +++ b/src/librustc_trans/trans/cabi_powerpc.rs @@ -171,7 +171,7 @@ pub fn compute_abi_info(ccx: &CrateContext, let mut arg_tys = Vec::new(); let mut offset = if sret { 4 } else { 0 }; - for aty in atys.iter() { + for aty in atys { let ty = classify_arg_ty(ccx, *aty, &mut offset); arg_tys.push(ty); }; diff --git a/src/librustc_trans/trans/cabi_x86.rs b/src/librustc_trans/trans/cabi_x86.rs index 32f6eb060c0..028d20f3084 100644 --- a/src/librustc_trans/trans/cabi_x86.rs +++ b/src/librustc_trans/trans/cabi_x86.rs @@ -60,7 +60,7 @@ pub fn compute_abi_info(ccx: &CrateContext, ret_ty = ArgType::direct(rty, None, None, attr); } - for &t in atys.iter() { + for &t in atys { let ty = match t.kind() { Struct => { let size = llsize_of_alloc(ccx, t); diff --git a/src/librustc_trans/trans/cabi_x86_64.rs b/src/librustc_trans/trans/cabi_x86_64.rs index c0ab0d24dab..22318fb6c14 100644 --- a/src/librustc_trans/trans/cabi_x86_64.rs +++ b/src/librustc_trans/trans/cabi_x86_64.rs @@ -195,7 +195,7 @@ fn classify_ty(ty: 
Type) -> Vec<RegClass> { off: uint, packed: bool) { let mut field_off = off; - for ty in tys.iter() { + for ty in tys { if !packed { field_off = align(field_off, *ty); } @@ -333,7 +333,7 @@ fn classify_ty(ty: Type) -> Vec<RegClass> { fn llreg_ty(ccx: &CrateContext, cls: &[RegClass]) -> Type { fn llvec_len(cls: &[RegClass]) -> uint { let mut len = 1; - for c in cls.iter() { + for c in cls { if *c != SSEUp { break; } @@ -413,7 +413,7 @@ pub fn compute_abi_info(ccx: &CrateContext, } let mut arg_tys = Vec::new(); - for t in atys.iter() { + for t in atys { let ty = x86_64_ty(ccx, *t, |cls| cls.is_pass_byval(), ByValAttribute); arg_tys.push(ty); } diff --git a/src/librustc_trans/trans/cabi_x86_win64.rs b/src/librustc_trans/trans/cabi_x86_win64.rs index 79e309dacaa..9b34c3bf262 100644 --- a/src/librustc_trans/trans/cabi_x86_win64.rs +++ b/src/librustc_trans/trans/cabi_x86_win64.rs @@ -38,7 +38,7 @@ pub fn compute_abi_info(ccx: &CrateContext, ret_ty = ArgType::direct(rty, None, None, attr); } - for &t in atys.iter() { + for &t in atys { let ty = match t.kind() { Struct => { match llsize_of_alloc(ccx, t) { diff --git a/src/librustc_trans/trans/cleanup.rs b/src/librustc_trans/trans/cleanup.rs index 096ea22eaac..ac76b52598d 100644 --- a/src/librustc_trans/trans/cleanup.rs +++ b/src/librustc_trans/trans/cleanup.rs @@ -72,7 +72,7 @@ impl<'blk, 'tcx: 'blk> fmt::Debug for CleanupScopeKind<'blk, 'tcx> { AstScopeKind(nid) => write!(f, "AstScopeKind({})", nid), LoopScopeKind(nid, ref blks) => { try!(write!(f, "LoopScopeKind({}, [", nid)); - for blk in blks.iter() { + for blk in blks { try!(write!(f, "{:p}, ", blk)); } write!(f, "])") diff --git a/src/librustc_trans/trans/common.rs b/src/librustc_trans/trans/common.rs index 5782b3987cb..392d320f166 100644 --- a/src/librustc_trans/trans/common.rs +++ b/src/librustc_trans/trans/common.rs @@ -515,7 +515,7 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> { -> Block<'a, 'tcx> { let out = self.new_id_block("join", id); let mut reachable = 
false; - for bcx in in_cxs.iter() { + for bcx in in_cxs { if !bcx.unreachable.get() { build::Br(*bcx, out.llbb, DebugLoc::None); reachable = true; diff --git a/src/librustc_trans/trans/controlflow.rs b/src/librustc_trans/trans/controlflow.rs index c4388603145..651058a5674 100644 --- a/src/librustc_trans/trans/controlflow.rs +++ b/src/librustc_trans/trans/controlflow.rs @@ -96,7 +96,7 @@ pub fn trans_block<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(), b.id, b.span, true); fcx.push_ast_cleanup_scope(cleanup_debug_loc); - for s in b.stmts.iter() { + for s in &b.stmts { bcx = trans_stmt(bcx, &**s); } diff --git a/src/librustc_trans/trans/debuginfo.rs b/src/librustc_trans/trans/debuginfo.rs index 258a6fb958d..9032eaeae50 100644 --- a/src/librustc_trans/trans/debuginfo.rs +++ b/src/librustc_trans/trans/debuginfo.rs @@ -375,7 +375,7 @@ impl<'tcx> TypeMap<'tcx> { }, ty::ty_tup(ref component_types) => { unique_type_id.push_str("tuple "); - for &component_type in component_types.iter() { + for &component_type in component_types { let component_type_id = self.get_unique_type_id_of_type(cx, component_type); let component_type_id = @@ -447,7 +447,7 @@ impl<'tcx> TypeMap<'tcx> { let sig = ty::erase_late_bound_regions(cx.tcx(), sig); - for ¶meter_type in sig.inputs.iter() { + for ¶meter_type in &sig.inputs { let parameter_type_id = self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = @@ -533,7 +533,7 @@ impl<'tcx> TypeMap<'tcx> { if tps.len() > 0 { output.push('<'); - for &type_parameter in tps.iter() { + for &type_parameter in tps { let param_type_id = type_map.get_unique_type_id_of_type(cx, type_parameter); let param_type_id = @@ -563,7 +563,7 @@ impl<'tcx> TypeMap<'tcx> { let sig = ty::erase_late_bound_regions(cx.tcx(), sig); - for ¶meter_type in sig.inputs.iter() { + for ¶meter_type in &sig.inputs { let parameter_type_id = self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = @@ 
-1440,7 +1440,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } // Arguments types - for arg in fn_decl.inputs.iter() { + for arg in &fn_decl.inputs { assert_type_for_node_id(cx, arg.pat.id, arg.pat.span); let arg_type = ty::node_id_to_type(cx.tcx(), arg.pat.id); let arg_type = monomorphize::apply_param_substs(cx.tcx(), @@ -2838,7 +2838,7 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, }); // regular arguments - for &argument_type in signature.inputs.iter() { + for &argument_type in &signature.inputs { signature_metadata.push(type_metadata(cx, argument_type, span)); } @@ -3226,7 +3226,7 @@ fn create_scope_map(cx: &CrateContext, // Push argument identifiers onto the stack so arguments integrate nicely // with variable shadowing. - for arg in args.iter() { + for arg in args { pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, _, path1| { scope_stack.push(ScopeStackEntry { scope_metadata: fn_metadata, ident: Some(path1.node) }); @@ -3292,7 +3292,7 @@ fn create_scope_map(cx: &CrateContext, scope_map.insert(block.id, scope_stack.last().unwrap().scope_metadata); // The interesting things here are statements and the concluding expression. 
- for statement in block.stmts.iter() { + for statement in &block.stmts { scope_map.insert(ast_util::stmt_id(&**statement), scope_stack.last().unwrap().scope_metadata); @@ -3306,7 +3306,7 @@ fn create_scope_map(cx: &CrateContext, } } - for exp in block.expr.iter() { + if let Some(ref exp) = block.expr { walk_expr(cx, &**exp, scope_stack, scope_map); } } @@ -3321,7 +3321,7 @@ fn create_scope_map(cx: &CrateContext, walk_pattern(cx, &*local.pat, scope_stack, scope_map); - for exp in local.init.iter() { + if let Some(ref exp) = local.init { walk_expr(cx, &**exp, scope_stack, scope_map); } } @@ -3407,7 +3407,7 @@ fn create_scope_map(cx: &CrateContext, scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); - for sub_pat in sub_pat_opt.iter() { + if let Some(ref sub_pat) = *sub_pat_opt { walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } } @@ -3419,8 +3419,8 @@ fn create_scope_map(cx: &CrateContext, ast::PatEnum(_, ref sub_pats_opt) => { scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); - for sub_pats in sub_pats_opt.iter() { - for p in sub_pats.iter() { + if let Some(ref sub_pats) = *sub_pats_opt { + for p in sub_pats { walk_pattern(cx, &**p, scope_stack, scope_map); } } @@ -3440,7 +3440,7 @@ fn create_scope_map(cx: &CrateContext, ast::PatTup(ref sub_pats) => { scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); - for sub_pat in sub_pats.iter() { + for sub_pat in sub_pats { walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } } @@ -3464,15 +3464,15 @@ fn create_scope_map(cx: &CrateContext, ast::PatVec(ref front_sub_pats, ref middle_sub_pats, ref back_sub_pats) => { scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); - for sub_pat in front_sub_pats.iter() { + for sub_pat in front_sub_pats { walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } - for sub_pat in middle_sub_pats.iter() { + if let Some(ref sub_pat) = *middle_sub_pats { walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } - for 
sub_pat in back_sub_pats.iter() { + for sub_pat in back_sub_pats { walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } } @@ -3534,7 +3534,7 @@ fn create_scope_map(cx: &CrateContext, ast::ExprVec(ref init_expressions) | ast::ExprTup(ref init_expressions) => { - for ie in init_expressions.iter() { + for ie in init_expressions { walk_expr(cx, &**ie, scope_stack, scope_map); } } @@ -3612,7 +3612,7 @@ fn create_scope_map(cx: &CrateContext, scope_stack, scope_map, |cx, scope_stack, scope_map| { - for &ast::Arg { pat: ref pattern, .. } in decl.inputs.iter() { + for &ast::Arg { pat: ref pattern, .. } in &decl.inputs { walk_pattern(cx, &**pattern, scope_stack, scope_map); } @@ -3623,13 +3623,13 @@ fn create_scope_map(cx: &CrateContext, ast::ExprCall(ref fn_exp, ref args) => { walk_expr(cx, &**fn_exp, scope_stack, scope_map); - for arg_exp in args.iter() { + for arg_exp in args { walk_expr(cx, &**arg_exp, scope_stack, scope_map); } } ast::ExprMethodCall(_, _, ref args) => { - for arg_exp in args.iter() { + for arg_exp in args { walk_expr(cx, &**arg_exp, scope_stack, scope_map); } } @@ -3642,7 +3642,7 @@ fn create_scope_map(cx: &CrateContext, // walk only one pattern per arm, as they all must contain the // same binding names. - for arm_ref in arms.iter() { + for arm_ref in arms { let arm_span = arm_ref.pats[0].span; with_new_scope(cx, @@ -3650,11 +3650,11 @@ fn create_scope_map(cx: &CrateContext, scope_stack, scope_map, |cx, scope_stack, scope_map| { - for pat in arm_ref.pats.iter() { + for pat in &arm_ref.pats { walk_pattern(cx, &**pat, scope_stack, scope_map); } - for guard_exp in arm_ref.guard.iter() { + if let Some(ref guard_exp) = arm_ref.guard { walk_expr(cx, &**guard_exp, scope_stack, scope_map) } @@ -3664,7 +3664,7 @@ fn create_scope_map(cx: &CrateContext, } ast::ExprStruct(_, ref fields, ref base_exp) => { - for &ast::Field { expr: ref exp, .. } in fields.iter() { + for &ast::Field { expr: ref exp, .. 
} in fields { walk_expr(cx, &**exp, scope_stack, scope_map); } @@ -3678,11 +3678,11 @@ fn create_scope_map(cx: &CrateContext, ref outputs, .. }) => { // inputs, outputs: Vec<(String, P<Expr>)> - for &(_, ref exp) in inputs.iter() { + for &(_, ref exp) in inputs { walk_expr(cx, &**exp, scope_stack, scope_map); } - for &(_, ref exp, _) in outputs.iter() { + for &(_, ref exp, _) in outputs { walk_expr(cx, &**exp, scope_stack, scope_map); } } @@ -3737,7 +3737,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, }, ty::ty_tup(ref component_types) => { output.push('('); - for &component_type in component_types.iter() { + for &component_type in component_types { push_debuginfo_type_name(cx, component_type, true, output); output.push_str(", "); } @@ -3802,7 +3802,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let sig = ty::erase_late_bound_regions(cx.tcx(), sig); if sig.inputs.len() > 0 { - for ¶meter_type in sig.inputs.iter() { + for ¶meter_type in &sig.inputs { push_debuginfo_type_name(cx, parameter_type, true, output); output.push_str(", "); } diff --git a/src/librustc_trans/trans/expr.rs b/src/librustc_trans/trans/expr.rs index cf85389cd5b..bed43a5c838 100644 --- a/src/librustc_trans/trans/expr.rs +++ b/src/librustc_trans/trans/expr.rs @@ -1451,7 +1451,7 @@ pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let custom_cleanup_scope = fcx.push_custom_cleanup_scope(); // First we trans the base, if we have one, to the dest - for base in optbase.iter() { + if let Some(base) = optbase { assert_eq!(discr, 0); match ty::expr_kind(bcx.tcx(), &*base.expr) { @@ -1461,7 +1461,7 @@ pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, ty::RvalueStmtExpr => bcx.tcx().sess.bug("unexpected expr kind for struct base expr"), _ => { let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &*base.expr, "base")); - for &(i, t) in base.fields.iter() { + for &(i, t) in &base.fields { let datum = base_datum.get_element( bcx, t, 
|srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, i)); assert!(type_is_sized(bcx.tcx(), datum.ty)); @@ -1485,7 +1485,7 @@ pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, // (i.e. avoid GEPi and `store`s to an alloca) . let mut vec_val = C_undef(llty); - for &(i, ref e) in fields.iter() { + for &(i, ref e) in fields { let block_datum = trans(bcx, &**e); bcx = block_datum.bcx; let position = C_uint(bcx.ccx(), i); @@ -1495,7 +1495,7 @@ pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, Store(bcx, vec_val, addr); } else { // Now, we just overwrite the fields we've explicitly specified - for &(i, ref e) in fields.iter() { + for &(i, ref e) in fields { let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i); let e_ty = expr_ty_adjusted(bcx, &**e); bcx = trans_into(bcx, &**e, SaveIn(dest)); diff --git a/src/librustc_trans/trans/foreign.rs b/src/librustc_trans/trans/foreign.rs index 6c017866ef0..00bb3036546 100644 --- a/src/librustc_trans/trans/foreign.rs +++ b/src/librustc_trans/trans/foreign.rs @@ -352,7 +352,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } arg_idx += 1; - for arg_ty in fn_type.arg_tys.iter() { + for arg_ty in &fn_type.arg_tys { if arg_ty.is_ignore() { continue; } @@ -453,7 +453,7 @@ fn gate_simd_ffi(tcx: &ty::ctxt, decl: &ast::FnDecl, ty: &ty::BareFnTy) { pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) { let _icx = push_ctxt("foreign::trans_foreign_mod"); - for foreign_item in foreign_mod.items.iter() { + for foreign_item in &foreign_mod.items { let lname = link_name(&**foreign_item); if let ast::ForeignItemFn(ref decl, _) = foreign_item.node { @@ -936,7 +936,7 @@ fn lltype_for_fn_from_foreign_types(ccx: &CrateContext, tys: &ForeignTypes) -> T } }; - for &arg_ty in tys.fn_ty.arg_tys.iter() { + for &arg_ty in &tys.fn_ty.arg_tys { if arg_ty.is_ignore() { continue; } @@ -987,7 +987,7 @@ fn add_argument_attributes(tys: &ForeignTypes, i += 1; - for &arg_ty in 
tys.fn_ty.arg_tys.iter() { + for &arg_ty in &tys.fn_ty.arg_tys { if arg_ty.is_ignore() { continue; } diff --git a/src/librustc_trans/trans/glue.rs b/src/librustc_trans/trans/glue.rs index 497237da38c..5d26daab5cd 100644 --- a/src/librustc_trans/trans/glue.rs +++ b/src/librustc_trans/trans/glue.rs @@ -572,7 +572,7 @@ pub fn emit_tydescs(ccx: &CrateContext) { // As of this point, allow no more tydescs to be created. ccx.finished_tydescs().set(true); let glue_fn_ty = Type::generic_glue_fn(ccx).ptr_to(); - for (_, ti) in ccx.tydescs().borrow().iter() { + for (_, ti) in &*ccx.tydescs().borrow() { // Each of the glue functions needs to be cast to a generic type // before being put into the tydesc because we only have a singleton // tydesc type. Then we'll recast each function to its real type when diff --git a/src/librustc_trans/trans/intrinsic.rs b/src/librustc_trans/trans/intrinsic.rs index 9bee2c5bbc6..340fcb76058 100644 --- a/src/librustc_trans/trans/intrinsic.rs +++ b/src/librustc_trans/trans/intrinsic.rs @@ -92,7 +92,7 @@ pub fn get_simple_intrinsic(ccx: &CrateContext, item: &ast::ForeignItem) -> Opti /// the only intrinsic that needs such verification is `transmute`. pub fn check_intrinsics(ccx: &CrateContext) { let mut last_failing_id = None; - for transmute_restriction in ccx.tcx().transmute_restrictions.borrow().iter() { + for transmute_restriction in &*ccx.tcx().transmute_restrictions.borrow() { // Sometimes, a single call to transmute will push multiple // type pairs to test in order to exhaustively test the // possibility around a type parameter. If one of those fails, diff --git a/src/librustc_trans/trans/meth.rs b/src/librustc_trans/trans/meth.rs index 335c639df90..f522024c2e7 100644 --- a/src/librustc_trans/trans/meth.rs +++ b/src/librustc_trans/trans/meth.rs @@ -65,7 +65,7 @@ pub fn trans_impl(ccx: &CrateContext, // items that we need to translate. 
if !generics.ty_params.is_empty() { let mut v = TransItemVisitor{ ccx: ccx }; - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref method) => { visit::walk_method_helper(&mut v, &**method); @@ -75,7 +75,7 @@ pub fn trans_impl(ccx: &CrateContext, } return; } - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref method) => { if method.pe_generics().ty_params.len() == 0 { diff --git a/src/librustc_trans/trans/tvec.rs b/src/librustc_trans/trans/tvec.rs index 844795b667b..66f603cbe07 100644 --- a/src/librustc_trans/trans/tvec.rs +++ b/src/librustc_trans/trans/tvec.rs @@ -263,7 +263,7 @@ pub fn write_content<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ast::ExprVec(ref elements) => { match dest { Ignore => { - for element in elements.iter() { + for element in elements { bcx = expr::trans_into(bcx, &**element, Ignore); } } diff --git a/src/librustc_trans/trans/type_of.rs b/src/librustc_trans/trans/type_of.rs index 6b6ca600a88..2243982c20d 100644 --- a/src/librustc_trans/trans/type_of.rs +++ b/src/librustc_trans/trans/type_of.rs @@ -84,7 +84,7 @@ pub fn untuple_arguments_if_necessary<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, match inputs[inputs.len() - 1].sty { ty::ty_tup(ref tupled_arguments) => { debug!("untuple_arguments_if_necessary(): untupling arguments"); - for &tupled_argument in tupled_arguments.iter() { + for &tupled_argument in tupled_arguments { result.push(tupled_argument); } } diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 350227c6662..d14945d1c9f 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -373,7 +373,7 @@ fn create_substs_for_ast_path<'tcx>( } } - for param in ty_param_defs[supplied_ty_param_count..].iter() { + for param in &ty_param_defs[supplied_ty_param_count..] { match param.default { Some(default) => { // This is a default type parameter. 
@@ -659,7 +659,7 @@ fn ast_path_to_trait_ref<'a,'tcx>( prohibit_projections(this.tcx(), assoc_bindings.as_slice()); } Some(ref mut v) => { - for binding in assoc_bindings.iter() { + for binding in &assoc_bindings { match ast_type_binding_to_projection_predicate(this, trait_ref.clone(), self_ty, binding) { Ok(pp) => { v.push(pp); } @@ -979,7 +979,7 @@ fn associated_path_def_to_ty<'tcx>(this: &AstConv<'tcx>, token::get_name(assoc_name), token::get_name(ty_param_name)); - for suitable_bound in suitable_bounds.iter() { + for suitable_bound in &suitable_bounds { span_note!(this.tcx().sess, ast_ty.span, "associated type `{}` could derive from `{}`", token::get_name(ty_param_name), @@ -1710,7 +1710,7 @@ pub fn partition_bounds<'a>(tcx: &ty::ctxt, let mut region_bounds = Vec::new(); let mut trait_bounds = Vec::new(); let mut trait_def_ids = DefIdMap(); - for ast_bound in ast_bounds.iter() { + for ast_bound in ast_bounds { match *ast_bound { ast::TraitTyParamBound(ref b, ast::TraitBoundModifier::None) => { match ::lookup_def_tcx(tcx, b.trait_ref.path.span, b.trait_ref.ref_id) { diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index c4b7ffb8729..174a9029534 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -240,7 +240,7 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, // below for details. demand::eqtype(fcx, pat.span, expected, pat_ty); - for elt in before.iter() { + for elt in before { check_pat(pcx, &**elt, inner_ty); } if let Some(ref slice) = *slice { @@ -254,7 +254,7 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, }); check_pat(pcx, &**slice, slice_ty); } - for elt in after.iter() { + for elt in after { check_pat(pcx, &**elt, inner_ty); } } @@ -348,12 +348,12 @@ pub fn check_match<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // Typecheck the patterns first, so that we get types for all the // bindings. 
- for arm in arms.iter() { + for arm in arms { let mut pcx = pat_ctxt { fcx: fcx, map: pat_id_map(&tcx.def_map, &*arm.pats[0]), }; - for p in arm.pats.iter() { + for p in &arm.pats { check_pat(&mut pcx, &**p, discrim_ty); } } @@ -439,7 +439,7 @@ pub fn check_pat_struct<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, pat: &'tcx ast::Pat, "use of trait `{}` in a struct pattern", name); fcx.write_error(pat.id); - for field in fields.iter() { + for field in fields { check_pat(pcx, &*field.node.pat, tcx.types.err); } return; @@ -458,7 +458,7 @@ pub fn check_pat_struct<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, pat: &'tcx ast::Pat, "`{}` does not name a struct or a struct variant", name); fcx.write_error(pat.id); - for field in fields.iter() { + for field in fields { check_pat(pcx, &*field.node.pat, tcx.types.err); } return; @@ -540,7 +540,7 @@ pub fn check_pat_enum<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, fcx.write_error(pat.id); if let Some(subpats) = subpats { - for pat in subpats.iter() { + for pat in subpats { check_pat(pcx, &**pat, tcx.types.err); } } @@ -558,7 +558,7 @@ pub fn check_pat_enum<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, "this pattern has {} field{}, but the corresponding {} has no fields", subpats.len(), if subpats.len() == 1 {""} else {"s"}, kind_name); - for pat in subpats.iter() { + for pat in subpats { check_pat(pcx, &**pat, tcx.types.err); } } else { @@ -568,7 +568,7 @@ pub fn check_pat_enum<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, kind_name, arg_tys.len(), if arg_tys.len() == 1 {""} else {"s"}); - for pat in subpats.iter() { + for pat in subpats { check_pat(pcx, &**pat, tcx.types.err); } } @@ -598,7 +598,7 @@ pub fn check_struct_pat_fields<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, let mut used_fields = FnvHashMap(); // Typecheck each field. 
- for &Spanned { node: ref field, span } in fields.iter() { + for &Spanned { node: ref field, span } in fields { let field_type = match used_fields.entry(field.ident.name) { Occupied(occupied) => { span_err!(tcx.sess, span, E0025, diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index e9ea0921bc9..8ad67b43178 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -134,7 +134,7 @@ pub fn probe<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // Create a list of simplified self types, if we can. let mut simplified_steps = Vec::new(); - for step in steps.iter() { + for step in &steps { match fast_reject::simplify_type(fcx.tcx(), step.self_ty, true) { None => { break; } Some(simplified_type) => { simplified_steps.push(simplified_type); } @@ -236,7 +236,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { fn assemble_inherent_candidates(&mut self) { let steps = self.steps.clone(); - for step in steps.iter() { + for step in &*steps { self.assemble_probe(step.self_ty); } } @@ -268,8 +268,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { // metadata if necessary. 
ty::populate_implementations_for_type_if_necessary(self.tcx(), def_id); - for impl_infos in self.tcx().inherent_impls.borrow().get(&def_id).iter() { - for &impl_def_id in impl_infos.iter() { + if let Some(impl_infos) = self.tcx().inherent_impls.borrow().get(&def_id) { + for &impl_def_id in &***impl_infos { self.assemble_inherent_impl_probe(impl_def_id); } } @@ -449,7 +449,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { let mut duplicates = HashSet::new(); let opt_applicable_traits = self.fcx.ccx.trait_map.get(&expr_id); for applicable_traits in opt_applicable_traits.into_iter() { - for &trait_did in applicable_traits.iter() { + for &trait_did in applicable_traits { if duplicates.insert(trait_did) { try!(self.assemble_extension_candidates_for_trait(trait_did)); } @@ -530,7 +530,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { Some(impls) => impls, }; - for &impl_def_id in impl_def_ids.borrow().iter() { + for &impl_def_id in &*impl_def_ids.borrow() { debug!("assemble_extension_candidates_for_trait_impl: trait_def_id={} impl_def_id={}", trait_def_id.repr(self.tcx()), impl_def_id.repr(self.tcx())); @@ -601,7 +601,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { // Check if there is an unboxed-closure self-type in the list of receivers. // If so, add "synthetic impls". 
let steps = self.steps.clone(); - for step in steps.iter() { + for step in &*steps { let (closure_def_id, _, _) = match step.self_ty.sty { ty::ty_closure(a, b, ref c) => (a, b, c), _ => continue, @@ -653,7 +653,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { method.repr(self.tcx()), method_index); - for step in self.steps.iter() { + for step in &*self.steps { debug!("assemble_projection_candidates: step={}", step.repr(self.tcx())); diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index c193e1ef483..a704508e6fa 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -735,7 +735,7 @@ pub fn check_item<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx ast::Item) { None => { } } - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref m) => { check_method_body(ccx, &impl_pty.generics, &**m); @@ -750,7 +750,7 @@ pub fn check_item<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx ast::Item) { ast::ItemTrait(_, ref generics, _, ref trait_methods) => { check_trait_on_unimplemented(ccx, generics, it); let trait_def = ty::lookup_trait_def(ccx.tcx, local_def(it.id)); - for trait_method in trait_methods.iter() { + for trait_method in trait_methods { match *trait_method { RequiredMethod(..) 
=> { // Nothing to do, since required methods don't have @@ -774,11 +774,11 @@ pub fn check_item<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx ast::Item) { } ast::ItemForeignMod(ref m) => { if m.abi == abi::RustIntrinsic { - for item in m.items.iter() { + for item in &m.items { check_intrinsic_type(ccx, &**item); } } else { - for item in m.items.iter() { + for item in &m.items { let pty = ty::lookup_item_type(ccx.tcx, local_def(item.id)); if !pty.generics.types.is_empty() { span_err!(ccx.tcx.sess, item.span, E0044, @@ -879,7 +879,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, // Check existing impl methods to see if they are both present in trait // and compatible with trait signature - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref impl_method) => { let impl_method_def_id = local_def(impl_method.id); @@ -969,7 +969,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, // Check for missing items from trait let provided_methods = ty::provided_trait_methods(tcx, impl_trait_ref.def_id); let mut missing_methods = Vec::new(); - for trait_item in trait_items.iter() { + for trait_item in &*trait_items { match *trait_item { ty::MethodTraitItem(ref trait_method) => { let is_implemented = @@ -2321,7 +2321,7 @@ fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // of arguments when we typecheck the functions. This isn't really the // right way to do this. 
let xs = [false, true]; - for check_blocks in xs.iter() { + for check_blocks in &xs { let check_blocks = *check_blocks; debug!("check_blocks={}", check_blocks); @@ -3101,7 +3101,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let mut best_dist = name.len(); let fields = ty::lookup_struct_fields(tcx, id); let mut best = None; - for elem in fields.iter() { + for elem in &fields { let n = elem.name.as_str(); // ignore already set fields if skip.iter().any(|&x| x == n) { @@ -3199,14 +3199,14 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let mut class_field_map = FnvHashMap(); let mut fields_found = 0; - for field in field_types.iter() { + for field in field_types { class_field_map.insert(field.name, (field.id, false)); } let mut error_happened = false; // Typecheck each field. - for field in ast_fields.iter() { + for field in ast_fields { let mut expected_field_type = tcx.types.err; let pair = class_field_map.get(&field.ident.node.name).map(|x| *x); @@ -3273,7 +3273,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, assert!(fields_found <= field_types.len()); if fields_found < field_types.len() { let mut missing_fields = Vec::new(); - for class_field in field_types.iter() { + for class_field in field_types { let name = class_field.name; let (_, seen) = class_field_map[name]; if !seen { @@ -3374,7 +3374,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, // Make sure to still write the types // otherwise we might ICE fcx.write_error(id); - for field in fields.iter() { + for field in fields { check_expr(fcx, &*field.expr); } match *base_expr { @@ -3628,10 +3628,10 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, constrain_path_type_parameters(fcx, expr); } ast::ExprInlineAsm(ref ia) => { - for &(_, ref input) in ia.inputs.iter() { + for &(_, ref input) in &ia.inputs { check_expr(fcx, &**input); } - for &(_, ref out, _) in ia.outputs.iter() { + for &(_, ref out, _) in 
&ia.outputs { check_expr(fcx, &**out); } fcx.write_nil(id); @@ -3764,14 +3764,14 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let typ = match uty { Some(uty) => { - for e in args.iter() { + for e in args { check_expr_coercable_to_type(fcx, &**e, uty); } uty } None => { let t: Ty = fcx.infcx().next_ty_var(); - for e in args.iter() { + for e in args { check_expr_has_type(fcx, &**e, t); } t @@ -4270,7 +4270,7 @@ fn check_block_with_expected<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, let mut warned = false; let mut any_diverges = false; let mut any_err = false; - for s in blk.stmts.iter() { + for s in &blk.stmts { check_stmt(fcx, &**s); let s_id = ast_util::stmt_id(&**s); let s_ty = fcx.node_ty(s_id); @@ -4506,7 +4506,7 @@ pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, let mut disr_vals: Vec<ty::Disr> = Vec::new(); let mut prev_disr_val: Option<ty::Disr> = None; - for v in vs.iter() { + for v in vs { // If the discriminant value is specified explicitly in the enum check whether the // initialization expression is valid, otherwise use the last value plus one. @@ -4838,7 +4838,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // variables. If the user provided some types, we may still need // to add defaults. If the user provided *too many* types, that's // a problem. 
- for &space in ParamSpace::all().iter() { + for &space in &ParamSpace::all() { adjust_type_parameters(fcx, span, space, type_defs, &mut substs); assert_eq!(substs.types.len(space), type_defs.len(space)); @@ -4870,13 +4870,13 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, fcx: &FnCtxt, segment: &ast::PathSegment) { - for typ in segment.parameters.types().iter() { + for typ in &segment.parameters.types() { span_err!(fcx.tcx().sess, typ.span, E0085, "type parameters may not appear here"); break; } - for lifetime in segment.parameters.lifetimes().iter() { + for lifetime in &segment.parameters.lifetimes() { span_err!(fcx.tcx().sess, lifetime.span, E0086, "lifetime parameters may not appear here"); break; diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index f8c7055a003..816edd92bf9 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -148,7 +148,7 @@ pub fn regionck_ensure_component_tys_wf<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, span: Span, component_tys: &[Ty<'tcx>]) { let mut rcx = Rcx::new(fcx, Repeating(0), SubjectNode::None); - for &component_ty in component_tys.iter() { + for &component_ty in component_tys { // Check that each type outlives the empty region. 
Since the // empty region is a subregion of all others, this can't fail // unless the type does not meet the well-formedness @@ -298,7 +298,7 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { .region_obligations(node_id) .to_vec(); - for r_o in region_obligations.iter() { + for r_o in ®ion_obligations { debug!("visit_region_obligations: r_o={}", r_o.repr(self.tcx())); let sup_type = self.resolve_type(r_o.sup_type); @@ -327,7 +327,7 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { debug!("relate_free_regions >>"); let tcx = self.tcx(); - for &ty in fn_sig_tys.iter() { + for &ty in fn_sig_tys { let ty = self.resolve_type(ty); debug!("relate_free_regions(t={})", ty.repr(tcx)); let body_scope = CodeExtent::from_node_id(body_id); @@ -337,7 +337,7 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { tcx, ty, body_scope); - for constraint in constraints.iter() { + for constraint in &constraints { debug!("constraint: {}", constraint.repr(tcx)); match *constraint { regionmanip::RegionSubRegionConstraint(_, @@ -424,7 +424,7 @@ fn visit_block(rcx: &mut Rcx, b: &ast::Block) { fn visit_arm(rcx: &mut Rcx, arm: &ast::Arm) { // see above - for p in arm.pats.iter() { + for p in &arm.pats { constrain_bindings_in_pat(&**p, rcx); } @@ -487,13 +487,13 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { let has_method_map = rcx.fcx.inh.method_map.borrow().contains_key(&method_call); // Check any autoderefs or autorefs that appear. 
- for &adjustment in rcx.fcx.inh.adjustments.borrow().get(&expr.id).iter() { + if let Some(adjustment) = rcx.fcx.inh.adjustments.borrow().get(&expr.id) { debug!("adjustment={:?}", adjustment); match *adjustment { ty::AdjustDerefRef(ty::AutoDerefRef {autoderefs, autoref: ref opt_autoref}) => { let expr_ty = rcx.resolve_node_type(expr.id); constrain_autoderefs(rcx, expr, autoderefs, expr_ty); - for autoref in opt_autoref.iter() { + if let Some(ref autoref) = *opt_autoref { link_autoref(rcx, expr, autoderefs, autoref); // Require that the resulting region encompasses @@ -753,7 +753,7 @@ fn check_expr_fn_block(rcx: &mut Rcx, debug!("ensure_free_variable_types_outlive_closure_bound({}, {})", bounds.region_bound.repr(tcx), expr.repr(tcx)); - for freevar in freevars.iter() { + for freevar in freevars { let var_node_id = { let def_id = freevar.def.def_id(); assert!(def_id.krate == ast::LOCAL_CRATE); @@ -779,7 +779,7 @@ fn check_expr_fn_block(rcx: &mut Rcx, }; // Check that the type meets the criteria of the existential bounds: - for builtin_bound in bounds.builtin_bounds.iter() { + for builtin_bound in &bounds.builtin_bounds { let code = traits::ClosureCapture(var_node_id, expr.span, builtin_bound); let cause = traits::ObligationCause::new(freevar.span, rcx.fcx.body_id, code); rcx.fcx.register_builtin_bound(var_ty, builtin_bound, cause); @@ -802,7 +802,7 @@ fn check_expr_fn_block(rcx: &mut Rcx, let tcx = rcx.fcx.ccx.tcx; debug!("constrain_captured_variables({}, {})", region_bound.repr(tcx), expr.repr(tcx)); - for freevar in freevars.iter() { + for freevar in freevars { debug!("constrain_captured_variables: freevar.def={:?}", freevar.def); // Identify the variable being closed over and its node-id. 
@@ -897,13 +897,13 @@ fn constrain_call<'a, I: Iterator<Item=&'a ast::Expr>>(rcx: &mut Rcx, } // as loop above, but for receiver - for r in receiver.iter() { + if let Some(r) = receiver { debug!("receiver: {}", r.repr(tcx)); type_of_node_must_outlive( rcx, infer::CallRcvr(r.span), r.id, callee_region); if implicitly_ref_args { - link_by_ref(rcx, &**r, callee_scope); + link_by_ref(rcx, &*r, callee_scope); } } } @@ -1079,8 +1079,8 @@ fn link_match(rcx: &Rcx, discr: &ast::Expr, arms: &[ast::Arm]) { let mc = mc::MemCategorizationContext::new(rcx.fcx); let discr_cmt = ignore_err!(mc.cat_expr(discr)); debug!("discr_cmt={}", discr_cmt.repr(rcx.tcx())); - for arm in arms.iter() { - for root_pat in arm.pats.iter() { + for arm in arms { + for root_pat in &arm.pats { link_pattern(rcx, mc, discr_cmt.clone(), &**root_pat); } } @@ -1092,7 +1092,7 @@ fn link_match(rcx: &Rcx, discr: &ast::Expr, arms: &[ast::Arm]) { fn link_fn_args(rcx: &Rcx, body_scope: CodeExtent, args: &[ast::Arg]) { debug!("regionck::link_fn_args(body_scope={:?})", body_scope); let mc = mc::MemCategorizationContext::new(rcx.fcx); - for arg in args.iter() { + for arg in args { let arg_ty = rcx.fcx.node_ty(arg.id); let re_scope = ty::ReScope(body_scope); let arg_cmt = mc.cat_rvalue(arg.id, arg.ty.span, re_scope, arg_ty); @@ -1418,7 +1418,7 @@ fn type_must_outlive<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, rcx.tcx(), ty, region); - for constraint in constraints.iter() { + for constraint in &constraints { debug!("constraint: {}", constraint.repr(rcx.tcx())); match *constraint { regionmanip::RegionSubRegionConstraint(None, r_a, r_b) => { @@ -1479,7 +1479,7 @@ fn generic_must_outlive<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, // The problem is that the type of `x` is `&'a A`. To be // well-formed, then, A must be lower-generic by `'a`, but we // don't know that this holds from first principles. 
- for &(ref r, ref p) in rcx.region_bound_pairs.iter() { + for &(ref r, ref p) in &rcx.region_bound_pairs { debug!("generic={} p={}", generic.repr(rcx.tcx()), p.repr(rcx.tcx())); diff --git a/src/librustc_typeck/check/regionmanip.rs b/src/librustc_typeck/check/regionmanip.rs index cbd51a880ce..4a0e2acc854 100644 --- a/src/librustc_typeck/check/regionmanip.rs +++ b/src/librustc_typeck/check/regionmanip.rs @@ -126,7 +126,7 @@ impl<'a, 'tcx> Wf<'a, 'tcx> { } ty::ty_tup(ref tuptys) => { - for &tupty in tuptys.iter() { + for &tupty in tuptys { self.accumulate_from_ty(tupty); } } @@ -236,7 +236,7 @@ impl<'a, 'tcx> Wf<'a, 'tcx> { // Variance of each type/region parameter. let variances = ty::item_variances(self.tcx, def_id); - for &space in ParamSpace::all().iter() { + for &space in &ParamSpace::all() { let region_params = substs.regions().get_slice(space); let region_variances = variances.regions.get_slice(space); let region_param_defs = generics.regions.get_slice(space); @@ -272,7 +272,7 @@ impl<'a, 'tcx> Wf<'a, 'tcx> { } } - for ®ion_bound in region_param_def.bounds.iter() { + for ®ion_bound in ®ion_param_def.bounds { // The type declared a constraint like // // 'b : 'a @@ -314,7 +314,7 @@ impl<'a, 'tcx> Wf<'a, 'tcx> { // Inspect bounds on this type parameter for any // region bounds. 
- for &r in type_param_def.bounds.region_bounds.iter() { + for &r in &type_param_def.bounds.region_bounds { self.stack.push((r, Some(ty))); self.accumulate_from_ty(type_param_ty); self.stack.pop().unwrap(); @@ -368,7 +368,7 @@ impl<'a, 'tcx> Wf<'a, 'tcx> { // And then, in turn, to be well-formed, the // `region_bound` that user specified must imply the // region bounds required from all of the trait types: - for &r_d in required_region_bounds.iter() { + for &r_d in &required_region_bounds { // Each of these is an instance of the `'c <= 'b` // constraint above self.out.push(RegionSubRegionConstraint(Some(ty), r_d, r_c)); diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs index f7babadd41f..b52e01f9a7a 100644 --- a/src/librustc_typeck/check/upvar.rs +++ b/src/librustc_typeck/check/upvar.rs @@ -138,7 +138,7 @@ impl<'a,'tcx> SeedBorrowKind<'a,'tcx> { } ty::with_freevars(self.tcx(), expr.id, |freevars| { - for freevar in freevars.iter() { + for freevar in freevars { let var_node_id = freevar.def.local_node_id(); let upvar_id = ty::UpvarId { var_id: var_node_id, closure_expr_id: expr.id }; diff --git a/src/librustc_typeck/check/vtable.rs b/src/librustc_typeck/check/vtable.rs index 5cf71a9be6a..43910a937e8 100644 --- a/src/librustc_typeck/check/vtable.rs +++ b/src/librustc_typeck/check/vtable.rs @@ -221,7 +221,7 @@ pub fn register_object_cast_obligations<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // bounds attached to the object cast. (In other words, if the // object type is Foo+Send, this would create an obligation // for the Send check.) - for builtin_bound in object_trait.bounds.builtin_bounds.iter() { + for builtin_bound in &object_trait.bounds.builtin_bounds { fcx.register_builtin_bound( referent_ty, builtin_bound, @@ -231,7 +231,7 @@ pub fn register_object_cast_obligations<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // Create obligations for the projection predicates. 
let projection_bounds = object_trait.projection_bounds_with_self_ty(fcx.tcx(), referent_ty); - for projection_bound in projection_bounds.iter() { + for projection_bound in &projection_bounds { let projection_obligation = Obligation::new(cause.clone(), projection_bound.as_predicate()); fcx.register_predicate(projection_obligation); @@ -263,7 +263,7 @@ fn check_object_type_binds_all_associated_types<'tcx>(tcx: &ty::ctxt<'tcx>, }) .collect(); - for projection_bound in object_trait.bounds.projection_bounds.iter() { + for projection_bound in &object_trait.bounds.projection_bounds { let pair = (projection_bound.0.projection_ty.trait_ref.def_id, projection_bound.0.projection_ty.item_name); associated_types.remove(&pair); diff --git a/src/librustc_typeck/check/wf.rs b/src/librustc_typeck/check/wf.rs index db226295cd9..24153fd94ea 100644 --- a/src/librustc_typeck/check/wf.rs +++ b/src/librustc_typeck/check/wf.rs @@ -147,15 +147,15 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { item.span, region::CodeExtent::from_node_id(item.id), Some(&mut this.cache)); - for variant in variants.iter() { - for field in variant.fields.iter() { + for variant in &variants { + for field in &variant.fields { // Regions are checked below. bounds_checker.check_traits_in_ty(field.ty); } // For DST, all intermediate types must be sized. 
if variant.fields.len() > 0 { - for field in variant.fields.init().iter() { + for field in variant.fields.init() { fcx.register_builtin_bound( field.ty, ty::BoundSized, @@ -323,7 +323,7 @@ fn reject_shadowing_type_parameters<'tcx>(tcx: &ty::ctxt<'tcx>, let impl_params = generics.types.get_slice(subst::TypeSpace).iter() .map(|tp| tp.name).collect::<HashSet<_>>(); - for method_param in generics.types.get_slice(subst::FnSpace).iter() { + for method_param in generics.types.get_slice(subst::FnSpace) { if impl_params.contains(&method_param.name) { span_err!(tcx.sess, span, E0194, "type parameter `{}` shadows another type parameter of the same name", diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 0eaecf8ac05..52b1eb490cc 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -49,7 +49,7 @@ pub fn resolve_type_vars_in_fn(fcx: &FnCtxt, assert_eq!(fcx.writeback_errors.get(), false); let mut wbcx = WritebackCx::new(fcx); wbcx.visit_block(blk); - for arg in decl.inputs.iter() { + for arg in &decl.inputs { wbcx.visit_node_id(ResolvingPattern(arg.pat.span), arg.id); wbcx.visit_pat(&*arg.pat); @@ -119,7 +119,7 @@ impl<'cx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'tcx> { match e.node { ast::ExprClosure(_, _, ref decl, _) => { - for input in decl.inputs.iter() { + for input in &decl.inputs { let _ = self.visit_node_id(ResolvingExpr(e.span), input.id); } @@ -182,7 +182,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { return; } - for (upvar_id, upvar_capture) in self.fcx.inh.upvar_capture_map.borrow().iter() { + for (upvar_id, upvar_capture) in &*self.fcx.inh.upvar_capture_map.borrow() { let new_upvar_capture = match *upvar_capture { ty::UpvarCapture::ByValue => ty::UpvarCapture::ByValue, ty::UpvarCapture::ByRef(ref upvar_borrow) => { @@ -204,12 +204,12 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { return } - for (def_id, closure_ty) in self.fcx.inh.closure_tys.borrow().iter() { + for 
(def_id, closure_ty) in &*self.fcx.inh.closure_tys.borrow() { let closure_ty = self.resolve(closure_ty, ResolvingClosure(*def_id)); self.fcx.tcx().closure_tys.borrow_mut().insert(*def_id, closure_ty); } - for (def_id, &closure_kind) in self.fcx.inh.closure_kinds.borrow().iter() { + for (def_id, &closure_kind) in &*self.fcx.inh.closure_kinds.borrow() { self.fcx.tcx().closure_kinds.borrow_mut().insert(*def_id, closure_kind); } } diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 9760d5f05df..ed340b0882c 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -136,7 +136,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { // the tcx. let mut tcx_inherent_impls = self.crate_context.tcx.inherent_impls.borrow_mut(); - for (k, v) in self.inherent_impls.borrow().iter() { + for (k, v) in &*self.inherent_impls.borrow() { tcx_inherent_impls.insert((*k).clone(), Rc::new((*v.borrow()).clone())); } @@ -167,7 +167,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { let impl_items = self.create_impl_from_item(item); - for associated_trait in associated_traits.iter() { + for associated_trait in associated_traits { let trait_ref = ty::node_id_to_trait_ref(self.crate_context.tcx, associated_trait.ref_id); debug!("(checking implementation) adding impl for trait '{}', item '{}'", @@ -215,7 +215,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { let impl_type_scheme = ty::lookup_item_type(tcx, impl_id); let prov = ty::provided_trait_methods(tcx, trait_ref.def_id); - for trait_method in prov.iter() { + for trait_method in &prov { // Synthesize an ID. 
let new_id = tcx.sess.next_node_id(); let new_did = local_def(new_id); @@ -303,7 +303,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { } }).collect(); - for trait_ref in trait_refs.iter() { + if let Some(ref trait_ref) = *trait_refs { let ty_trait_ref = ty::node_id_to_trait_ref( self.crate_context.tcx, trait_ref.ref_id); @@ -345,17 +345,17 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { assert!(associated_traits.is_some()); // Record all the trait items. - for trait_ref in associated_traits.iter() { + if let Some(trait_ref) = associated_traits { self.add_trait_impl(trait_ref.def_id, impl_def_id); } // For any methods that use a default implementation, add them to // the map. This is a bit unfortunate. - for item_def_id in impl_items.iter() { + for item_def_id in &impl_items { let impl_item = ty::impl_or_trait_item(tcx, item_def_id.def_id()); match impl_item { ty::MethodTraitItem(ref method) => { - for &source in method.provided_source.iter() { + if let Some(source) = method.provided_source { tcx.provided_method_sources .borrow_mut() .insert(item_def_id.def_id(), source); @@ -398,7 +398,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { Some(found_impls) => found_impls }; - for &impl_did in trait_impls.borrow().iter() { + for &impl_did in &*trait_impls.borrow() { let items = &(*impl_items)[impl_did]; if items.len() < 1 { // We'll error out later. For now, just don't ICE. @@ -465,7 +465,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { // Clone first to avoid a double borrow error. 
let trait_impls = trait_impls.borrow().clone(); - for &impl_did in trait_impls.iter() { + for &impl_did in &trait_impls { debug!("check_implementations_of_copy: impl_did={}", impl_did.repr(tcx)); @@ -559,7 +559,7 @@ fn subst_receiver_types_in_method_ty<'tcx>(tcx: &ty::ctxt<'tcx>, // replace the type parameters declared on the trait with those // from the impl - for &space in [subst::TypeSpace, subst::SelfSpace].iter() { + for &space in &[subst::TypeSpace, subst::SelfSpace] { method_generics.types.replace( space, impl_type_scheme.generics.types.get_slice(space).to_vec()); diff --git a/src/librustc_typeck/coherence/overlap.rs b/src/librustc_typeck/coherence/overlap.rs index a7bad3dc789..403dcf1e25a 100644 --- a/src/librustc_typeck/coherence/overlap.rs +++ b/src/librustc_typeck/coherence/overlap.rs @@ -46,7 +46,7 @@ impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { (k, v.borrow().clone()) }).collect(); - for &(trait_def_id, ref impls) in trait_def_ids.iter() { + for &(trait_def_id, ref impls) in &trait_def_ids { self.check_for_overlapping_impls_of_trait(trait_def_id, impls); } } @@ -65,7 +65,7 @@ impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { continue; } - for &impl2_def_id in trait_impls[(i+1)..].iter() { + for &impl2_def_id in &trait_impls[(i+1)..] { self.check_if_impls_overlap(trait_def_id, impl1_def_id, impl2_def_id); diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index ed33ddd458a..6d92343d332 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -203,7 +203,7 @@ fn get_enum_variant_types<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, let tcx = ccx.tcx; // Create a set of parameter types shared among all the variants. 
- for variant in variants.iter() { + for variant in variants { let variant_def_id = local_def(variant.node.id); // Nullary enum constructors get turned into constants; n-ary enum @@ -249,7 +249,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, if let ast::ItemTrait(_, _, _, ref trait_items) = item.node { // For each method, construct a suitable ty::Method and // store it into the `tcx.impl_or_trait_items` table: - for trait_item in trait_items.iter() { + for trait_item in trait_items { match *trait_item { ast::RequiredMethod(_) | ast::ProvidedMethod(_) => { @@ -527,8 +527,8 @@ fn ensure_no_ty_param_bounds(ccx: &CollectCtxt, thing: &'static str) { let mut warn = false; - for ty_param in generics.ty_params.iter() { - for bound in ty_param.bounds.iter() { + for ty_param in &*generics.ty_params { + for bound in &*ty_param.bounds { match *bound { ast::TraitTyParamBound(..) => { warn = true; @@ -596,7 +596,7 @@ fn convert(ccx: &CollectCtxt, it: &ast::Item) { }; let mut methods = Vec::new(); - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref method) => { let body_id = method.pe_body().id; @@ -644,7 +644,7 @@ fn convert(ccx: &CollectCtxt, it: &ast::Item) { &ty_generics, parent_visibility); - for trait_ref in opt_trait_ref.iter() { + if let Some(ref trait_ref) = *opt_trait_ref { astconv::instantiate_trait_ref(ccx, &ExplicitRscope, trait_ref, @@ -663,7 +663,7 @@ fn convert(ccx: &CollectCtxt, it: &ast::Item) { it.ident.repr(ccx.tcx), trait_def.repr(ccx.tcx)); - for trait_method in trait_methods.iter() { + for trait_method in trait_methods { let self_type = ty::mk_self_type(tcx); match *trait_method { ast::RequiredMethod(ref type_method) => { @@ -1168,7 +1168,7 @@ fn add_unsized_bound<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, { // Try to find an unbound in bounds. 
let mut unbound = None; - for ab in ast_bounds.iter() { + for ab in ast_bounds { if let &ast::TraitTyParamBound(ref ptr, ast::TraitBoundModifier::Maybe) = ab { if unbound.is_none() { assert!(ptr.bound_lifetimes.is_empty()); @@ -1249,12 +1249,12 @@ fn ty_generics<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, create_predicates(ccx.tcx, &mut result, space); // Add the bounds not associated with a type parameter - for predicate in where_clause.predicates.iter() { + for predicate in &where_clause.predicates { match predicate { &ast::WherePredicate::BoundPredicate(ref bound_pred) => { let ty = ast_ty_to_ty(ccx, &ExplicitRscope, &*bound_pred.bounded_ty); - for bound in bound_pred.bounds.iter() { + for bound in &*bound_pred.bounds { match bound { &ast::TyParamBound::TraitTyParamBound(ref poly_trait_ref, _) => { let mut projections = Vec::new(); @@ -1269,7 +1269,7 @@ fn ty_generics<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, result.predicates.push(space, trait_ref.as_predicate()); - for projection in projections.iter() { + for projection in &projections { result.predicates.push(space, projection.as_predicate()); } } @@ -1285,7 +1285,7 @@ fn ty_generics<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, &ast::WherePredicate::RegionPredicate(ref region_pred) => { let r1 = ast_region_to_region(ccx.tcx, ®ion_pred.lifetime); - for bound in region_pred.bounds.iter() { + for bound in ®ion_pred.bounds { let r2 = ast_region_to_region(ccx.tcx, bound); let pred = ty::Binder(ty::OutlivesPredicate(r1, r2)); result.predicates.push(space, ty::Predicate::RegionOutlives(pred)) @@ -1308,16 +1308,16 @@ fn ty_generics<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, result: &mut ty::Generics<'tcx>, space: subst::ParamSpace) { - for type_param_def in result.types.get_slice(space).iter() { + for type_param_def in result.types.get_slice(space) { let param_ty = ty::mk_param_from_def(tcx, type_param_def); for predicate in ty::predicates(tcx, param_ty, &type_param_def.bounds).into_iter() { result.predicates.push(space, predicate); } } - for 
region_param_def in result.regions.get_slice(space).iter() { + for region_param_def in result.regions.get_slice(space) { let region = region_param_def.to_early_bound_region(); - for &bound_region in region_param_def.bounds.iter() { + for &bound_region in ®ion_param_def.bounds { // account for new binder introduced in the predicate below; no need // to shift `region` because it is never a late-bound region let bound_region = ty_fold::shift_region(bound_region, 1); @@ -1480,7 +1480,7 @@ fn ty_of_foreign_fn_decl<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ast_generics: &ast::Generics, abi: abi::Abi) -> ty::TypeScheme<'tcx> { - for i in decl.inputs.iter() { + for i in &decl.inputs { match (*i).pat.node { ast::PatIdent(_, _, _) => (), ast::PatWild(ast::PatWildSingle) => (), diff --git a/src/librustc_typeck/variance.rs b/src/librustc_typeck/variance.rs index 6c5950e4df5..40197ee2c49 100644 --- a/src/librustc_typeck/variance.rs +++ b/src/librustc_typeck/variance.rs @@ -499,12 +499,12 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> { // `ty::VariantInfo::from_ast_variant()` ourselves // here, mainly so as to mask the differences between // struct-like enums and so forth. - for ast_variant in enum_definition.variants.iter() { + for ast_variant in &enum_definition.variants { let variant = ty::VariantInfo::from_ast_variant(tcx, &**ast_variant, /*discriminant*/ 0); - for arg_ty in variant.args.iter() { + for arg_ty in &variant.args { self.add_constraints_from_ty(generics, *arg_ty, self.covariant); } } @@ -513,7 +513,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> { ast::ItemStruct(..) 
=> { let generics = &ty::lookup_item_type(tcx, did).generics; let struct_fields = ty::lookup_struct_fields(tcx, did); - for field_info in struct_fields.iter() { + for field_info in &struct_fields { assert_eq!(field_info.id.krate, ast::LOCAL_CRATE); let field_ty = ty::node_id_to_type(tcx, field_info.id.node); self.add_constraints_from_ty(generics, field_ty, self.covariant); @@ -522,7 +522,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> { ast::ItemTrait(..) => { let trait_items = ty::trait_items(tcx, did); - for trait_item in trait_items.iter() { + for trait_item in &*trait_items { match *trait_item { ty::MethodTraitItem(ref method) => { self.add_constraints_from_sig(&method.generics, @@ -759,7 +759,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { } ty::ty_tup(ref subtys) => { - for &subty in subtys.iter() { + for &subty in subtys { self.add_constraints_from_ty(generics, subty, variance); } } @@ -821,7 +821,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { let projections = data.projection_bounds_with_self_ty(self.tcx(), self.tcx().types.err); - for projection in projections.iter() { + for projection in &projections { self.add_constraints_from_ty(generics, projection.0.ty, self.invariant); } } @@ -866,7 +866,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { variance: VarianceTermPtr<'a>) { debug!("add_constraints_from_substs(def_id={:?})", def_id); - for p in type_param_defs.iter() { + for p in type_param_defs { let variance_decl = self.declared_variance(p.def_id, def_id, TypeParam, p.space, p.index as uint); @@ -875,7 +875,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.add_constraints_from_ty(generics, substs_ty, variance_i); } - for p in region_param_defs.iter() { + for p in region_param_defs { let variance_decl = self.declared_variance(p.def_id, def_id, RegionParam, p.space, p.index as uint); @@ -892,7 +892,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { sig: &ty::PolyFnSig<'tcx>, variance: VarianceTermPtr<'a>) { let contra = 
self.contravariant(variance); - for &input in sig.0.inputs.iter() { + for &input in &sig.0.inputs { self.add_constraints_from_ty(generics, input, contra); } if let ty::FnConverging(result_type) = sig.0.output { @@ -990,7 +990,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { while changed { changed = false; - for constraint in self.constraints.iter() { + for constraint in &self.constraints { let Constraint { inferred, variance: term } = *constraint; let InferredIndex(inferred) = inferred; let variance = self.evaluate(term); diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index d5c03879438..b140e26e037 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -254,7 +254,7 @@ impl Item { /// Finds the `doc` attribute as a List and returns the list of attributes /// nested inside. pub fn doc_list<'a>(&'a self) -> Option<&'a [Attribute]> { - for attr in self.attrs.iter() { + for attr in &self.attrs { match *attr { List(ref x, ref list) if "doc" == *x => { return Some(list.as_slice()); @@ -268,7 +268,7 @@ impl Item { /// Finds the `doc` attribute as a NameValue and returns the corresponding /// value found. 
pub fn doc_value<'a>(&'a self) -> Option<&'a str> { - for attr in self.attrs.iter() { + for attr in &self.attrs { match *attr { NameValue(ref x, ref v) if "doc" == *x => { return Some(v.as_slice()); @@ -281,8 +281,8 @@ impl Item { pub fn is_hidden_from_doc(&self) -> bool { match self.doc_list() { - Some(ref l) => { - for innerattr in l.iter() { + Some(l) => { + for innerattr in l { match *innerattr { Word(ref s) if "hidden" == *s => { return true @@ -508,12 +508,12 @@ impl<'tcx> Clean<(Vec<TyParamBound>, Vec<TypeBinding>)> for ty::ExistentialBound fn clean(&self, cx: &DocContext) -> (Vec<TyParamBound>, Vec<TypeBinding>) { let mut tp_bounds = vec![]; self.region_bound.clean(cx).map(|b| tp_bounds.push(RegionBound(b))); - for bb in self.builtin_bounds.iter() { + for bb in &self.builtin_bounds { tp_bounds.push(bb.clean(cx)); } let mut bindings = vec![]; - for &ty::Binder(ref pb) in self.projection_bounds.iter() { + for &ty::Binder(ref pb) in &self.projection_bounds { bindings.push(TypeBinding { name: pb.projection_ty.item_name.clean(cx), ty: pb.ty.clean(cx) @@ -636,10 +636,10 @@ impl<'tcx> Clean<TyParamBound> for ty::TraitRef<'tcx> { // collect any late bound regions let mut late_bounds = vec![]; - for &ty_s in self.substs.types.get_slice(ParamSpace::TypeSpace).iter() { + for &ty_s in self.substs.types.get_slice(ParamSpace::TypeSpace) { use rustc::middle::ty::{Region, sty}; if let sty::ty_tup(ref ts) = ty_s.sty { - for &ty_s in ts.iter() { + for &ty_s in ts { if let sty::ty_rptr(ref reg, _) = ty_s.sty { if let &Region::ReLateBound(_, _) = *reg { debug!(" hit an ReLateBound {:?}", reg); @@ -662,7 +662,7 @@ impl<'tcx> Clean<TyParamBound> for ty::TraitRef<'tcx> { impl<'tcx> Clean<Vec<TyParamBound>> for ty::ParamBounds<'tcx> { fn clean(&self, cx: &DocContext) -> Vec<TyParamBound> { let mut v = Vec::new(); - for t in self.trait_bounds.iter() { + for t in &self.trait_bounds { v.push(t.clean(cx)); } for r in self.region_bounds.iter().filter_map(|r| r.clean(cx)) { @@ -872,7 
+872,7 @@ impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics<'tcx>, subst::ParamSpace) { Some(did) => did, None => return false }; - for bound in bounds.iter() { + for bound in bounds { if let TyParamBound::TraitBound(PolyTrait { trait_: Type::ResolvedPath { did, .. }, .. }, TBM::None) = *bound { @@ -915,7 +915,7 @@ impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics<'tcx>, subst::ParamSpace) { }).collect::<Vec<_>>(); // Finally, run through the type parameters again and insert a ?Sized unbound for // any we didn't find to be Sized. - for tp in stripped_typarams.iter() { + for tp in &stripped_typarams { if !sized_params.contains(&tp.name) { let mut sized_bound = ty::BuiltinBound::BoundSized.clean(cx); if let TyParamBound::TraitBound(_, ref mut tbm) = sized_bound { @@ -1420,12 +1420,12 @@ impl PrimitiveType { } fn find(attrs: &[Attribute]) -> Option<PrimitiveType> { - for attr in attrs.iter() { + for attr in attrs { let list = match *attr { List(ref k, ref l) if *k == "doc" => l, _ => continue, }; - for sub_attr in list.iter() { + for sub_attr in list { let value = match *sub_attr { NameValue(ref k, ref v) if *k == "primitive" => v.as_slice(), @@ -2175,7 +2175,7 @@ impl Clean<Vec<Item>> for doctree::Import { let mut ret = vec![]; let remaining = if !denied { let mut remaining = vec![]; - for path in list.iter() { + for path in list { match inline::try_inline(cx, path.node.id(), None) { Some(items) => { ret.extend(items.into_iter()); diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs index 79ca24a18d4..84e88158219 100644 --- a/src/librustdoc/externalfiles.rs +++ b/src/librustdoc/externalfiles.rs @@ -62,7 +62,7 @@ macro_rules! 
load_or_return { pub fn load_external_files(names: &[String]) -> Option<String> { let mut out = String::new(); - for name in names.iter() { + for name in names { out.push_str(load_or_return!(name.as_slice(), None, None).as_slice()); out.push('\n'); } diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index eb7402b67c4..e86c0e39714 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -212,21 +212,21 @@ impl fmt::Display for clean::PathParameters { if lifetimes.len() > 0 || types.len() > 0 || bindings.len() > 0 { try!(f.write_str("<")); let mut comma = false; - for lifetime in lifetimes.iter() { + for lifetime in lifetimes { if comma { try!(f.write_str(", ")); } comma = true; try!(write!(f, "{}", *lifetime)); } - for ty in types.iter() { + for ty in types { if comma { try!(f.write_str(", ")); } comma = true; try!(write!(f, "{}", *ty)); } - for binding in bindings.iter() { + for binding in bindings { if comma { try!(f.write_str(", ")); } @@ -239,7 +239,7 @@ impl fmt::Display for clean::PathParameters { clean::PathParameters::Parenthesized { ref inputs, ref output } => { try!(f.write_str("(")); let mut comma = false; - for ty in inputs.iter() { + for ty in inputs { if comma { try!(f.write_str(", ")); } @@ -332,7 +332,7 @@ fn path<F, G>(w: &mut fmt::Formatter, match rel_root { Some(root) => { let mut root = String::from_str(root.as_slice()); - for seg in path.segments[..amt].iter() { + for seg in &path.segments[..amt] { if "super" == seg.name || "self" == seg.name { try!(write!(w, "{}::", seg.name)); @@ -347,7 +347,7 @@ fn path<F, G>(w: &mut fmt::Formatter, } } None => { - for seg in path.segments[..amt].iter() { + for seg in &path.segments[..amt] { try!(write!(w, "{}::", seg.name)); } } @@ -359,7 +359,7 @@ fn path<F, G>(w: &mut fmt::Formatter, Some((ref fqp, shortty)) if abs_root.is_some() => { let mut url = String::from_str(abs_root.unwrap().as_slice()); let to_link = &fqp[..fqp.len() - 1]; - for component in 
to_link.iter() { + for component in to_link { url.push_str(component.as_slice()); url.push_str("/"); } @@ -440,7 +440,7 @@ fn tybounds(w: &mut fmt::Formatter, typarams: &Option<Vec<clean::TyParamBound> >) -> fmt::Result { match *typarams { Some(ref params) => { - for param in params.iter() { + for param in params { try!(write!(w, " + ")); try!(write!(w, "{}", *param)); } @@ -770,7 +770,7 @@ impl fmt::Display for ModuleSummary { (100 * cnt.unmarked) as f64/tot as f64)); try!(write!(f, "</td></tr>")); - for submodule in m.submodules.iter() { + for submodule in &m.submodules { try!(fmt_inner(f, context, submodule)); } context.pop(); diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 8a007fb035e..20ba0bb9e90 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -283,7 +283,7 @@ pub fn run(mut krate: clean::Crate, let default: &[_] = &[]; match krate.module.as_ref().map(|m| m.doc_list().unwrap_or(default)) { Some(attrs) => { - for attr in attrs.iter() { + for attr in attrs { match *attr { clean::NameValue(ref x, ref s) if "html_favicon_url" == *x => { @@ -353,7 +353,7 @@ pub fn run(mut krate: clean::Crate, krate = cache.fold_crate(krate); // Cache where all our extern crates are located - for &(n, ref e) in krate.externs.iter() { + for &(n, ref e) in &krate.externs { cache.extern_locations.insert(n, extern_location(e, &cx.dst)); let did = ast::DefId { krate: n, node: ast::CRATE_NODE_ID }; cache.paths.insert(did, (vec![e.name.to_string()], ItemType::Module)); @@ -364,11 +364,11 @@ pub fn run(mut krate: clean::Crate, // Favor linking to as local extern as possible, so iterate all crates in // reverse topological order. 
for &(n, ref e) in krate.externs.iter().rev() { - for &prim in e.primitives.iter() { + for &prim in &e.primitives { cache.primitive_locations.insert(prim, n); } } - for &prim in krate.primitives.iter() { + for &prim in &krate.primitives { cache.primitive_locations.insert(prim, ast::LOCAL_CRATE); } @@ -402,7 +402,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> old_io::IoResult<Stri // Attach all orphan methods to the type's definition if the type // has since been learned. - for &(pid, ref item) in orphan_methods.iter() { + for &(pid, ref item) in orphan_methods { let did = ast_util::local_def(pid); match paths.get(&did) { Some(&(ref fqp, _)) => { @@ -420,7 +420,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> old_io::IoResult<Stri // Reduce `NodeId` in paths into smaller sequential numbers, // and prune the paths that do not appear in the index. - for item in search_index.iter() { + for item in &*search_index { match item.parent { Some(nodeid) => { if !nodeid_to_pathid.contains_key(&nodeid) { @@ -542,7 +542,7 @@ fn write_shared(cx: &Context, let mut w = try!(File::create(&dst)); try!(writeln!(&mut w, "var searchIndex = {{}};")); try!(writeln!(&mut w, "{}", search_index)); - for index in all_indexes.iter() { + for index in &all_indexes { try!(writeln!(&mut w, "{}", *index)); } try!(writeln!(&mut w, "initSearch(searchIndex);")); @@ -550,7 +550,7 @@ fn write_shared(cx: &Context, // Update the list of all implementors for traits let dst = cx.dst.join("implementors"); try!(mkdir(&dst)); - for (&did, imps) in cache.implementors.iter() { + for (&did, imps) in &cache.implementors { // Private modules can leak through to this phase of rustdoc, which // could contain implementations for otherwise private types. 
In some // rare cases we could find an implementation for an item which wasn't @@ -564,7 +564,7 @@ fn write_shared(cx: &Context, }; let mut mydst = dst.clone(); - for part in remote_path[..remote_path.len() - 1].iter() { + for part in &remote_path[..remote_path.len() - 1] { mydst.push(part.as_slice()); try!(mkdir(&mydst)); } @@ -578,12 +578,12 @@ fn write_shared(cx: &Context, let mut f = BufferedWriter::new(try!(File::create(&mydst))); try!(writeln!(&mut f, "(function() {{var implementors = {{}};")); - for implementor in all_implementors.iter() { + for implementor in &all_implementors { try!(write!(&mut f, "{}", *implementor)); } try!(write!(&mut f, r"implementors['{}'] = [", krate.name)); - for imp in imps.iter() { + for imp in imps { // If the trait and implementation are in the same crate, then // there's no need to emit information about it (there's inlining // going on). If they're in different crates then the crate defining @@ -679,10 +679,10 @@ fn extern_location(e: &clean::ExternalCrate, dst: &Path) -> ExternalLocation { // Failing that, see if there's an attribute specifying where to find this // external crate - for attr in e.attrs.iter() { + for attr in &e.attrs { match *attr { clean::List(ref x, ref list) if "doc" == *x => { - for attr in list.iter() { + for attr in list { match *attr { clean::NameValue(ref x, ref s) if "html_root_url" == *x => { @@ -1043,7 +1043,7 @@ impl DocFolder for Cache { impl<'a> Cache { fn generics(&mut self, generics: &clean::Generics) { - for typ in generics.type_params.iter() { + for typ in &generics.type_params { self.typarams.insert(typ.did, typ.name.clone()); } } @@ -1190,7 +1190,7 @@ impl Context { .collect::<String>(); match cache().paths.get(&it.def_id) { Some(&(ref names, _)) => { - for name in (&names[..names.len() - 1]).iter() { + for name in &names[..names.len() - 1] { url.push_str(name.as_slice()); url.push_str("/"); } @@ -1252,7 +1252,7 @@ impl Context { fn build_sidebar(&self, m: &clean::Module) -> 
HashMap<String, Vec<NameDoc>> { let mut map = HashMap::new(); - for item in m.items.iter() { + for item in &m.items { if self.ignore_private_item(item) { continue } // avoid putting foreign items to the sidebar. @@ -1536,7 +1536,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, debug!("{:?}", indices); let mut curty = None; - for &idx in indices.iter() { + for &idx in &indices { let myitem = &items[idx]; let myty = Some(shortty(myitem)); @@ -1696,7 +1696,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, try!(write!(w, "{{ }}")); } else { try!(write!(w, "{{\n")); - for t in types.iter() { + for t in &types { try!(write!(w, " ")); try!(render_method(w, t.item())); try!(write!(w, ";\n")); @@ -1704,7 +1704,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, if types.len() > 0 && required.len() > 0 { try!(w.write_str("\n")); } - for m in required.iter() { + for m in &required { try!(write!(w, " ")); try!(render_method(w, m.item())); try!(write!(w, ";\n")); @@ -1712,7 +1712,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, if required.len() > 0 && provided.len() > 0 { try!(w.write_str("\n")); } - for m in provided.iter() { + for m in &provided { try!(write!(w, " ")); try!(render_method(w, m.item())); try!(write!(w, " {{ ... 
}}\n")); @@ -1741,7 +1741,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, <h2 id='associated-types'>Associated Types</h2> <div class='methods'> ")); - for t in types.iter() { + for t in &types { try!(trait_item(w, *t)); } try!(write!(w, "</div>")); @@ -1753,7 +1753,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, <h2 id='required-methods'>Required Methods</h2> <div class='methods'> ")); - for m in required.iter() { + for m in &required { try!(trait_item(w, *m)); } try!(write!(w, "</div>")); @@ -1763,7 +1763,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, <h2 id='provided-methods'>Provided Methods</h2> <div class='methods'> ")); - for m in provided.iter() { + for m in &provided { try!(trait_item(w, *m)); } try!(write!(w, "</div>")); @@ -1776,7 +1776,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, ")); match cache.implementors.get(&it.def_id) { Some(implementors) => { - for i in implementors.iter() { + for i in implementors { try!(writeln!(w, "<li>{}<code>impl{} {} for {}{}</code></li>", ConciseStability(&i.stability), i.generics, i.trait_, i.for_, WhereClause(&i.generics))); @@ -1890,7 +1890,7 @@ fn item_enum(w: &mut fmt::Formatter, it: &clean::Item, try!(write!(w, " {{}}")); } else { try!(write!(w, " {{\n")); - for v in e.variants.iter() { + for v in &e.variants { try!(write!(w, " ")); let name = v.name.as_ref().unwrap().as_slice(); match v.inner { @@ -1933,7 +1933,7 @@ fn item_enum(w: &mut fmt::Formatter, it: &clean::Item, try!(document(w, it)); if e.variants.len() > 0 { try!(write!(w, "<h2 class='variants'>Variants</h2>\n<table>")); - for variant in e.variants.iter() { + for variant in &e.variants { try!(write!(w, "<tr><td id='variant.{name}'>{stab}<code>{name}</code></td><td>", stab = ConciseStability(&variant.stability), name = variant.name.as_ref().unwrap().as_slice())); @@ -1996,7 +1996,7 @@ fn render_struct(w: &mut fmt::Formatter, it: &clean::Item, 
doctree::Plain => { try!(write!(w, " {{\n{}", tab)); let mut fields_stripped = false; - for field in fields.iter() { + for field in fields { match field.inner { clean::StructFieldItem(clean::HiddenStructField) => { fields_stripped = true; @@ -2049,7 +2049,7 @@ fn render_methods(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result { .partition(|i| i.impl_.trait_.is_none()); if non_trait.len() > 0 { try!(write!(w, "<h2 id='methods'>Methods</h2>")); - for i in non_trait.iter() { + for i in &non_trait { try!(render_impl(w, i)); } } @@ -2058,13 +2058,13 @@ fn render_methods(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result { Implementations</h2>")); let (derived, manual): (Vec<_>, _) = traits.into_iter() .partition(|i| i.impl_.derived); - for i in manual.iter() { + for i in &manual { try!(render_impl(w, i)); } if derived.len() > 0 { try!(write!(w, "<h3 id='derived_implementations'>Derived Implementations \ </h3>")); - for i in derived.iter() { + for i in &derived { try!(render_impl(w, i)); } } @@ -2137,14 +2137,14 @@ fn render_impl(w: &mut fmt::Formatter, i: &Impl) -> fmt::Result { } try!(write!(w, "<div class='impl-items'>")); - for trait_item in i.impl_.items.iter() { + for trait_item in &i.impl_.items { try!(doctraititem(w, trait_item, true)); } fn render_default_methods(w: &mut fmt::Formatter, t: &clean::Trait, i: &clean::Impl) -> fmt::Result { - for trait_item in t.items.iter() { + for trait_item in &t.items { let n = trait_item.item().name.clone(); match i.items.iter().find(|m| { m.name == n }) { Some(..) 
=> continue, @@ -2209,7 +2209,7 @@ impl<'a> fmt::Display for Sidebar<'a> { None => return Ok(()) }; try!(write!(w, "<div class='block {}'><h2>{}</h2>", short, longty)); - for &NameDoc(ref name, ref doc) in items.iter() { + for &NameDoc(ref name, ref doc) in items { let curty = shortty(cur).to_static_str(); let class = if cur.name.as_ref().unwrap() == name && short == curty { "current" } else { "" }; diff --git a/src/librustdoc/html/toc.rs b/src/librustdoc/html/toc.rs index 9143baf9ed9..7790c7d6a50 100644 --- a/src/librustdoc/html/toc.rs +++ b/src/librustdoc/html/toc.rs @@ -185,7 +185,7 @@ impl fmt::Debug for Toc { impl fmt::Display for Toc { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(write!(fmt, "<ul>")); - for entry in self.entries.iter() { + for entry in &self.entries { // recursively format this table of contents (the // `{children}` is the key). try!(write!(fmt, diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 29e52d627cd..9efd7cfb2e2 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -197,11 +197,11 @@ pub fn main_args(args: &[String]) -> int { if matches.opt_strs("passes") == ["list"] { println!("Available passes for running rustdoc:"); - for &(name, _, description) in PASSES.iter() { + for &(name, _, description) in PASSES { println!("{:>20} - {}", name, description); } println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970 - for &name in DEFAULT_PASSES.iter() { + for &name in DEFAULT_PASSES { println!("{:>20}", name); } return 0; @@ -217,7 +217,7 @@ pub fn main_args(args: &[String]) -> int { let input = matches.free[0].as_slice(); let mut libs = SearchPaths::new(); - for s in matches.opt_strs("L").iter() { + for s in &matches.opt_strs("L") { libs.add_path(s.as_slice()); } let externs = match parse_externs(&matches) { @@ -319,7 +319,7 @@ fn acquire_input(input: &str, /// error message. 
fn parse_externs(matches: &getopts::Matches) -> Result<core::Externs, String> { let mut externs = HashMap::new(); - for arg in matches.opt_strs("extern").iter() { + for arg in &matches.opt_strs("extern") { let mut parts = arg.splitn(1, '='); let name = match parts.next() { Some(s) => s, @@ -353,7 +353,7 @@ fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matche // First, parse the crate and extract all relevant information. let mut paths = SearchPaths::new(); - for s in matches.opt_strs("L").iter() { + for s in &matches.opt_strs("L") { paths.add_path(s.as_slice()); } let cfgs = matches.opt_strs("cfg"); @@ -383,7 +383,7 @@ fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matche // with the passes which we are supposed to run. match krate.module.as_ref().unwrap().doc_list() { Some(nested) => { - for inner in nested.iter() { + for inner in nested { match *inner { clean::Word(ref x) if "no_default_passes" == *x => { @@ -417,7 +417,7 @@ fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matche let path = matches.opt_str("plugin-path") .unwrap_or("/tmp/rustdoc/plugins".to_string()); let mut pm = plugins::PluginManager::new(Path::new(path)); - for pass in passes.iter() { + for pass in &passes { let plugin = match PASSES.iter() .position(|&(p, _, _)| { p == *pass diff --git a/src/librustdoc/markdown.rs b/src/librustdoc/markdown.rs index 7f932b6ca21..365fb78cfae 100644 --- a/src/librustdoc/markdown.rs +++ b/src/librustdoc/markdown.rs @@ -47,7 +47,7 @@ pub fn render(input: &str, mut output: Path, matches: &getopts::Matches, output.set_extension("html"); let mut css = String::new(); - for name in matches.opt_strs("markdown-css").iter() { + for name in &matches.opt_strs("markdown-css") { let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name); css.push_str(s.as_slice()) } diff --git a/src/librustdoc/passes.rs b/src/librustdoc/passes.rs index 30b2ad810f1..e1c6bf1f4cf 100644 --- 
a/src/librustdoc/passes.rs +++ b/src/librustdoc/passes.rs @@ -255,7 +255,7 @@ pub fn unindent_comments(krate: clean::Crate) -> plugins::PluginResult { fn fold_item(&mut self, i: Item) -> Option<Item> { let mut i = i; let mut avec: Vec<clean::Attribute> = Vec::new(); - for attr in i.attrs.iter() { + for attr in &i.attrs { match attr { &clean::NameValue(ref x, ref s) if "doc" == *x => { @@ -280,7 +280,7 @@ pub fn collapse_docs(krate: clean::Crate) -> plugins::PluginResult { fn fold_item(&mut self, i: Item) -> Option<Item> { let mut docstr = String::new(); let mut i = i; - for attr in i.attrs.iter() { + for attr in &i.attrs { match *attr { clean::NameValue(ref x, ref s) if "doc" == *x => { diff --git a/src/librustdoc/plugins.rs b/src/librustdoc/plugins.rs index 2b5595131d0..a2afba091f4 100644 --- a/src/librustdoc/plugins.rs +++ b/src/librustdoc/plugins.rs @@ -64,7 +64,7 @@ impl PluginManager { pub fn run_plugins(&self, krate: clean::Crate) -> (clean::Crate, Vec<PluginJson> ) { let mut out_json = Vec::new(); let mut krate = krate; - for &callback in self.callbacks.iter() { + for &callback in &self.callbacks { let (c, res) = callback(krate); krate = c; out_json.push(res); diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 848205f589e..5bcda778cbb 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -147,7 +147,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { om.vis = vis; om.stab = self.stability(id); om.id = id; - for i in m.items.iter() { + for i in &m.items { self.visit_item(&**i, None, &mut om); } om @@ -211,7 +211,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { if glob { match it.node { ast::ItemMod(ref m) => { - for i in m.items.iter() { + for i in &m.items { self.visit_item(&**i, None, om); } } diff --git a/src/libserialize/collection_impls.rs b/src/libserialize/collection_impls.rs index 8b39d91ffae..d61d5b68462 100644 --- a/src/libserialize/collection_impls.rs +++ b/src/libserialize/collection_impls.rs @@ -74,7 
+74,7 @@ impl< fn encode<S: Encoder>(&self, e: &mut S) -> Result<(), S::Error> { e.emit_map(self.len(), |e| { let mut i = 0; - for (key, val) in self.iter() { + for (key, val) in self { try!(e.emit_map_elt_key(i, |e| key.encode(e))); try!(e.emit_map_elt_val(i, |e| val.encode(e))); i += 1; @@ -107,7 +107,7 @@ impl< fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_seq(self.len(), |s| { let mut i = 0; - for e in self.iter() { + for e in self { try!(s.emit_seq_elt(i, |s| e.encode(s))); i += 1; } @@ -135,7 +135,7 @@ impl< > Encodable for EnumSet<T> { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { let mut bits = 0; - for item in self.iter() { + for item in self { bits |= item.to_uint(); } s.emit_uint(bits) @@ -166,7 +166,7 @@ impl<K, V, S> Encodable for HashMap<K, V, S> fn encode<E: Encoder>(&self, e: &mut E) -> Result<(), E::Error> { e.emit_map(self.len(), |e| { let mut i = 0; - for (key, val) in self.iter() { + for (key, val) in self { try!(e.emit_map_elt_key(i, |e| key.encode(e))); try!(e.emit_map_elt_val(i, |e| val.encode(e))); i += 1; @@ -204,7 +204,7 @@ impl<T, S> Encodable for HashSet<T, S> fn encode<E: Encoder>(&self, s: &mut E) -> Result<(), E::Error> { s.emit_seq(self.len(), |s| { let mut i = 0; - for e in self.iter() { + for e in self { try!(s.emit_seq_elt(i, |s| e.encode(s))); i += 1; } diff --git a/src/libserialize/hex.rs b/src/libserialize/hex.rs index c9b6af26ce0..a34ae1087db 100644 --- a/src/libserialize/hex.rs +++ b/src/libserialize/hex.rs @@ -42,7 +42,7 @@ impl ToHex for [u8] { /// ``` fn to_hex(&self) -> String { let mut v = Vec::with_capacity(self.len() * 2); - for &byte in self.iter() { + for &byte in self { v.push(CHARS[(byte >> 4) as uint]); v.push(CHARS[(byte & 0xf) as uint]); } diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index 8d8bd32ba77..f43f22ec57c 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -1051,7 +1051,7 @@ impl Json { /// Otherwise, it will return 
the Json value associated with the final key. pub fn find_path<'a>(&'a self, keys: &[&str]) -> Option<&'a Json>{ let mut target = self; - for key in keys.iter() { + for key in keys { match target.find(*key) { Some(t) => { target = t; }, None => return None @@ -1069,7 +1069,7 @@ impl Json { match map.get(key) { Some(json_value) => Some(json_value), None => { - for (_, v) in map.iter() { + for (_, v) in map { match v.search(key) { x if x.is_some() => return x, _ => () @@ -1367,7 +1367,7 @@ impl Stack { // Used by Parser to insert StackElement::Key elements at the top of the stack. fn push_key(&mut self, key: string::String) { self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16)); - for c in key.as_bytes().iter() { + for c in key.as_bytes() { self.str_buffer.push(*c); } } @@ -2497,7 +2497,7 @@ impl<A: ToJson> ToJson for Vec<A> { impl<A: ToJson> ToJson for BTreeMap<string::String, A> { fn to_json(&self) -> Json { let mut d = BTreeMap::new(); - for (key, value) in self.iter() { + for (key, value) in self { d.insert((*key).clone(), value.to_json()); } Json::Object(d) @@ -2507,7 +2507,7 @@ impl<A: ToJson> ToJson for BTreeMap<string::String, A> { impl<A: ToJson> ToJson for HashMap<string::String, A> { fn to_json(&self) -> Json { let mut d = BTreeMap::new(); - for (key, value) in self.iter() { + for (key, value) in self { d.insert((*key).clone(), value.to_json()); } Json::Object(d) @@ -2670,7 +2670,7 @@ mod tests { fn mk_object(items: &[(string::String, Json)]) -> Json { let mut d = BTreeMap::new(); - for item in items.iter() { + for item in items { match *item { (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); }, } @@ -3044,7 +3044,7 @@ mod tests { ("\"\\u12ab\"", "\u{12ab}"), ("\"\\uAB12\"", "\u{AB12}")]; - for &(i, o) in s.iter() { + for &(i, o) in &s { let v: string::String = super::decode(i).unwrap(); assert_eq!(v, o); } diff --git a/src/libstd/ascii.rs b/src/libstd/ascii.rs index 9aa38e711e7..d3094cc745e 100644 --- 
a/src/libstd/ascii.rs +++ b/src/libstd/ascii.rs @@ -232,7 +232,7 @@ pub fn escape_default<F>(c: u8, mut f: F) where _ => { f(b'\\'); f(b'x'); - for &offset in [4u, 0u].iter() { + for &offset in &[4u, 0u] { match ((c as i32) >> offset) & 0xf { i @ 0 ... 9 => f(b'0' + (i as u8)), i => f(b'a' + (i as u8 - 10)), diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs index 29b7d93399a..511fa86685d 100644 --- a/src/libstd/collections/hash/map.rs +++ b/src/libstd/collections/hash/map.rs @@ -1977,7 +1977,7 @@ mod test_map { let mut observed: u32 = 0; - for (k, v) in m.iter() { + for (k, v) in &m { assert_eq!(*v, *k * 2); observed |= 1 << *k; } @@ -2167,7 +2167,7 @@ mod test_map { let map: HashMap<int, int> = xs.iter().map(|&x| x).collect(); - for &(k, v) in xs.iter() { + for &(k, v) in &xs { assert_eq!(map.get(&k), Some(&v)); } } diff --git a/src/libstd/collections/hash/set.rs b/src/libstd/collections/hash/set.rs index 755235cbace..fef30a28208 100644 --- a/src/libstd/collections/hash/set.rs +++ b/src/libstd/collections/hash/set.rs @@ -1033,7 +1033,7 @@ mod test_set { assert!(a.insert(i)); } let mut observed: u32 = 0; - for k in a.iter() { + for k in &a { observed |= 1 << *k; } assert_eq!(observed, 0xFFFF_FFFF); @@ -1154,7 +1154,7 @@ mod test_set { let set: HashSet<int> = xs.iter().map(|&x| x).collect(); - for x in xs.iter() { + for x in &xs { assert!(set.contains(x)); } } @@ -1240,7 +1240,7 @@ mod test_set { assert_eq!(last_i, 49); } - for _ in s.iter() { panic!("s should be empty!"); } + for _ in &s { panic!("s should be empty!"); } // reset to try again. 
s.extend(1..100); diff --git a/src/libstd/old_io/extensions.rs b/src/libstd/old_io/extensions.rs index f429f731b7d..d729c2800ce 100644 --- a/src/libstd/old_io/extensions.rs +++ b/src/libstd/old_io/extensions.rs @@ -406,12 +406,12 @@ mod test { let uints = [0, 1, 2, 42, 10_123, 100_123_456, ::u64::MAX]; let mut writer = Vec::new(); - for i in uints.iter() { + for i in &uints { writer.write_le_u64(*i).unwrap(); } let mut reader = MemReader::new(writer); - for i in uints.iter() { + for i in &uints { assert!(reader.read_le_u64().unwrap() == *i); } } @@ -422,12 +422,12 @@ mod test { let uints = [0, 1, 2, 42, 10_123, 100_123_456, ::u64::MAX]; let mut writer = Vec::new(); - for i in uints.iter() { + for i in &uints { writer.write_be_u64(*i).unwrap(); } let mut reader = MemReader::new(writer); - for i in uints.iter() { + for i in &uints { assert!(reader.read_be_u64().unwrap() == *i); } } @@ -437,12 +437,12 @@ mod test { let ints = [::i32::MIN, -123456, -42, -5, 0, 1, ::i32::MAX]; let mut writer = Vec::new(); - for i in ints.iter() { + for i in &ints { writer.write_be_i32(*i).unwrap(); } let mut reader = MemReader::new(writer); - for i in ints.iter() { + for i in &ints { // this tests that the sign extension is working // (comparing the values as i32 would not test this) assert!(reader.read_be_int_n(4).unwrap() == *i as i64); diff --git a/src/libstd/old_io/fs.rs b/src/libstd/old_io/fs.rs index 0a9aeb849be..142f723ef71 100644 --- a/src/libstd/old_io/fs.rs +++ b/src/libstd/old_io/fs.rs @@ -1110,7 +1110,7 @@ mod test { } let files = check!(readdir(dir)); let mut mem = [0u8; 4]; - for f in files.iter() { + for f in &files { { let n = f.filestem_str(); check!(File::open(f).read(&mut mem)); diff --git a/src/libstd/old_io/net/addrinfo.rs b/src/libstd/old_io/net/addrinfo.rs index e37744f3aa3..2b7506b5c34 100644 --- a/src/libstd/old_io/net/addrinfo.rs +++ b/src/libstd/old_io/net/addrinfo.rs @@ -121,7 +121,7 @@ mod test { let ipaddrs = get_host_addresses("localhost").unwrap(); let 
mut found_local = false; let local_addr = &Ipv4Addr(127, 0, 0, 1); - for addr in ipaddrs.iter() { + for addr in &ipaddrs { found_local = found_local || addr == local_addr; } assert!(found_local); diff --git a/src/libstd/old_io/process.rs b/src/libstd/old_io/process.rs index f253f9799e9..79b29c31f6c 100644 --- a/src/libstd/old_io/process.rs +++ b/src/libstd/old_io/process.rs @@ -400,7 +400,7 @@ impl fmt::Debug for Command { /// character. fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "{:?}", self.program)); - for arg in self.args.iter() { + for arg in &self.args { try!(write!(f, " '{:?}'", arg)); } Ok(()) @@ -1045,7 +1045,7 @@ mod tests { let output = String::from_utf8(prog.wait_with_output().unwrap().output).unwrap(); let r = os::env(); - for &(ref k, ref v) in r.iter() { + for &(ref k, ref v) in &r { // don't check windows magical empty-named variables assert!(k.is_empty() || output.contains(format!("{}={}", *k, *v).as_slice()), @@ -1063,7 +1063,7 @@ mod tests { let output = String::from_utf8(prog.wait_with_output().unwrap().output).unwrap(); let r = os::env(); - for &(ref k, ref v) in r.iter() { + for &(ref k, ref v) in &r { // don't check android RANDOM variables if *k != "RANDOM".to_string() { assert!(output.contains(format!("{}={}", diff --git a/src/libstd/old_io/test.rs b/src/libstd/old_io/test.rs index f49e2397d42..af829c2cfbb 100644 --- a/src/libstd/old_io/test.rs +++ b/src/libstd/old_io/test.rs @@ -92,7 +92,7 @@ fn base_port() -> u16 { let mut final_base = base; - for &(dir, base) in bases.iter() { + for &(dir, base) in &bases { if path_s.contains(dir) { final_base = base; break; diff --git a/src/libstd/os.rs b/src/libstd/os.rs index 600ca60349a..2df97bf7752 100644 --- a/src/libstd/os.rs +++ b/src/libstd/os.rs @@ -146,7 +146,7 @@ pub fn env_as_bytes() -> Vec<(Vec<u8>,Vec<u8>)> { unsafe { fn env_convert(input: Vec<Vec<u8>>) -> Vec<(Vec<u8>, Vec<u8>)> { let mut pairs = Vec::new(); - for p in input.iter() { + for p in &input { let mut 
it = p.splitn(1, |b| *b == b'='); let key = it.next().unwrap().to_vec(); let default: &[u8] = &[]; @@ -972,7 +972,7 @@ impl MemoryMap { let mut custom_flags = false; let len = round_up(min_len, page_size()); - for &o in options.iter() { + for &o in options { match o { MapReadable => { prot |= libc::PROT_READ; }, MapWritable => { prot |= libc::PROT_WRITE; }, @@ -1051,7 +1051,7 @@ impl MemoryMap { let mut offset: uint = 0; let len = round_up(min_len, page_size()); - for &o in options.iter() { + for &o in options { match o { MapReadable => { readable = true; }, MapWritable => { writable = true; }, @@ -1522,7 +1522,7 @@ mod tests { fn test_env_getenv() { let e = env(); assert!(e.len() > 0u); - for p in e.iter() { + for p in &e { let (n, v) = (*p).clone(); debug!("{}", n); let v2 = getenv(n.as_slice()); @@ -1577,7 +1577,7 @@ mod tests { setenv("HOME", ""); assert!(os::homedir().is_none()); - for s in oldhome.iter() { + if let Some(s) = oldhome { setenv("HOME", s.as_slice()); } } @@ -1606,10 +1606,10 @@ mod tests { setenv("USERPROFILE", "/home/PaloAlto"); assert!(os::homedir() == Some(Path::new("/home/MountainView"))); - for s in oldhome.iter() { + if let Some(s) = oldhome { setenv("HOME", s.as_slice()); } - for s in olduserprofile.iter() { + if let Some(s) = olduserprofile { setenv("USERPROFILE", s.as_slice()); } } diff --git a/src/libstd/path/mod.rs b/src/libstd/path/mod.rs index 74b8757f6a9..0d80258d7e0 100644 --- a/src/libstd/path/mod.rs +++ b/src/libstd/path/mod.rs @@ -623,11 +623,11 @@ pub trait GenericPath: Clone + GenericPathUnsafe { fn push_many<T: BytesContainer>(&mut self, paths: &[T]) { let t: Option<&T> = None; if BytesContainer::is_str(t) { - for p in paths.iter() { + for p in paths { self.push(p.container_as_str().unwrap()) } } else { - for p in paths.iter() { + for p in paths { self.push(p.container_as_bytes()) } } diff --git a/src/libstd/rand/mod.rs b/src/libstd/rand/mod.rs index 211abc2fc83..cc72c5bed99 100644 --- a/src/libstd/rand/mod.rs +++ 
b/src/libstd/rand/mod.rs @@ -464,7 +464,7 @@ mod test { // check every remainder mod 8, both in small and big vectors. let lengths = [0, 1, 2, 3, 4, 5, 6, 7, 80, 81, 82, 83, 84, 85, 86, 87]; - for &n in lengths.iter() { + for &n in &lengths { let mut v = repeat(0u8).take(n).collect::<Vec<_>>(); r.fill_bytes(v.as_mut_slice()); diff --git a/src/libstd/rand/os.rs b/src/libstd/rand/os.rs index 992afb2d10f..4b45d5501c2 100644 --- a/src/libstd/rand/os.rs +++ b/src/libstd/rand/os.rs @@ -404,7 +404,7 @@ mod test { } // start all the tasks - for tx in txs.iter() { + for tx in &txs { tx.send(()).unwrap(); } } diff --git a/src/libstd/rt/unwind.rs b/src/libstd/rt/unwind.rs index 757aecaaaff..81ca5aa0e8a 100644 --- a/src/libstd/rt/unwind.rs +++ b/src/libstd/rt/unwind.rs @@ -551,7 +551,7 @@ fn begin_unwind_inner(msg: Box<Any + Send>, file_line: &(&'static str, uint)) -> let amt = CALLBACK_CNT.load(Ordering::SeqCst); &CALLBACKS[..cmp::min(amt, MAX_CALLBACKS)] }; - for cb in callbacks.iter() { + for cb in callbacks { match cb.load(Ordering::SeqCst) { 0 => {} n => { diff --git a/src/libstd/sys/common/net.rs b/src/libstd/sys/common/net.rs index 51b6e0a1c1e..36467a6dc9d 100644 --- a/src/libstd/sys/common/net.rs +++ b/src/libstd/sys/common/net.rs @@ -554,7 +554,7 @@ pub fn await(fds: &[sock_t], deadline: Option<u64>, status: SocketStatus) -> IoResult<()> { let mut set: c::fd_set = unsafe { mem::zeroed() }; let mut max = 0; - for &fd in fds.iter() { + for &fd in fds { c::fd_set(&mut set, fd); max = cmp::max(max, fd + 1); } diff --git a/src/libstd/sys/unix/process.rs b/src/libstd/sys/unix/process.rs index b004a47f8a3..f35015efd94 100644 --- a/src/libstd/sys/unix/process.rs +++ b/src/libstd/sys/unix/process.rs @@ -570,7 +570,7 @@ fn with_envp<K,V,T,F>(env: Option<&HashMap<K, V>>, Some(env) => { let mut tmps = Vec::with_capacity(env.len()); - for pair in env.iter() { + for pair in env { let mut kv = Vec::new(); kv.push_all(pair.0.container_as_bytes()); kv.push('=' as u8); diff --git 
a/src/libstd/sys/windows/os.rs b/src/libstd/sys/windows/os.rs index a82259ad5ec..3ac54f2e793 100644 --- a/src/libstd/sys/windows/os.rs +++ b/src/libstd/sys/windows/os.rs @@ -240,7 +240,7 @@ pub fn split_paths(unparsed: &[u8]) -> Vec<Path> { let mut in_progress = Vec::new(); let mut in_quote = false; - for b in unparsed.iter() { + for b in unparsed { match *b { b';' if !in_quote => { parsed.push(Path::new(in_progress.as_slice())); diff --git a/src/libstd/sys/windows/process.rs b/src/libstd/sys/windows/process.rs index 3d66718d00b..a0a0c08ed09 100644 --- a/src/libstd/sys/windows/process.rs +++ b/src/libstd/sys/windows/process.rs @@ -142,7 +142,7 @@ impl Process { // To have the spawning semantics of unix/windows stay the same, we need to // read the *child's* PATH if one is provided. See #15149 for more details. let program = cfg.env().and_then(|env| { - for (key, v) in env.iter() { + for (key, v) in env { if b"PATH" != key.container_as_bytes() { continue } // Split the value and test each path to see if the @@ -372,7 +372,7 @@ fn make_command_line(prog: &CString, args: &[CString]) -> String { let mut cmd = String::new(); append_arg(&mut cmd, str::from_utf8(prog.as_bytes()).ok() .expect("expected program name to be utf-8 encoded")); - for arg in args.iter() { + for arg in args { cmd.push(' '); append_arg(&mut cmd, str::from_utf8(arg.as_bytes()).ok() .expect("expected argument to be utf-8 encoded")); @@ -437,7 +437,7 @@ fn with_envp<K, V, T, F>(env: Option<&collections::HashMap<K, V>>, cb: F) -> T Some(env) => { let mut blk = Vec::new(); - for pair in env.iter() { + for pair in env { let kv = format!("{}={}", pair.0.container_as_str().unwrap(), pair.1.container_as_str().unwrap()); diff --git a/src/libstd/sys/windows/thread_local.rs b/src/libstd/sys/windows/thread_local.rs index 0f8ceed39a6..5c73233d94b 100644 --- a/src/libstd/sys/windows/thread_local.rs +++ b/src/libstd/sys/windows/thread_local.rs @@ -248,7 +248,7 @@ unsafe fn run_dtors() { DTOR_LOCK.unlock(); ret }; 
- for &(key, dtor) in dtors.iter() { + for &(key, dtor) in &dtors { let ptr = TlsGetValue(key); if !ptr.is_null() { TlsSetValue(key, ptr::null_mut()); diff --git a/src/libstd/thread_local/mod.rs b/src/libstd/thread_local/mod.rs index 2a9bf452329..d4d777789dd 100644 --- a/src/libstd/thread_local/mod.rs +++ b/src/libstd/thread_local/mod.rs @@ -425,7 +425,7 @@ mod imp { unsafe extern fn run_dtors(mut ptr: *mut u8) { while !ptr.is_null() { let list: Box<List> = mem::transmute(ptr); - for &(ptr, dtor) in list.iter() { + for &(ptr, dtor) in &*list { dtor(ptr); } ptr = DTORS.get(); diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs index 8546e03cc87..5535e5911e0 100644 --- a/src/libsyntax/ast_map/mod.rs +++ b/src/libsyntax/ast_map/mod.rs @@ -730,7 +730,7 @@ impl<'ast> NodeCollector<'ast> { } fn visit_fn_decl(&mut self, decl: &'ast FnDecl) { - for a in decl.inputs.iter() { + for a in &decl.inputs { self.insert(a.id, NodeArg(&*a.pat)); } } @@ -743,7 +743,7 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { self.parent = i.id; match i.node { ItemImpl(_, _, _, _, _, ref impl_items) => { - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { MethodImplItem(ref m) => { self.insert(m.id, NodeImplItem(impl_item)); @@ -755,12 +755,12 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { } } ItemEnum(ref enum_definition, _) => { - for v in enum_definition.variants.iter() { + for v in &enum_definition.variants { self.insert(v.node.id, NodeVariant(&**v)); } } ItemForeignMod(ref nm) => { - for nitem in nm.items.iter() { + for nitem in &nm.items { self.insert(nitem.id, NodeForeignItem(&**nitem)); } } @@ -774,13 +774,13 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { } } ItemTrait(_, _, ref bounds, ref trait_items) => { - for b in bounds.iter() { + for b in &**bounds { if let TraitTyParamBound(ref t, TraitBoundModifier::None) = *b { self.insert(t.trait_ref.ref_id, NodeItem(i)); } } - for tm in trait_items.iter() { + for 
tm in trait_items { match *tm { RequiredMethod(ref m) => { self.insert(m.id, NodeTraitItem(tm)); diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 7f1264ac9a1..8fc9012829b 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -302,7 +302,7 @@ pub fn split_trait_methods(trait_methods: &[TraitItem]) -> (Vec<TypeMethod>, Vec<P<Method>> ) { let mut reqd = Vec::new(); let mut provd = Vec::new(); - for trt_method in trait_methods.iter() { + for trt_method in trait_methods { match *trt_method { RequiredMethod(ref tm) => reqd.push((*tm).clone()), ProvidedMethod(ref m) => provd.push((*m).clone()), @@ -391,10 +391,10 @@ pub struct IdVisitor<'a, O:'a> { impl<'a, O: IdVisitingOperation> IdVisitor<'a, O> { fn visit_generics_helper(&mut self, generics: &Generics) { - for type_parameter in generics.ty_params.iter() { + for type_parameter in &*generics.ty_params { self.operation.visit_id(type_parameter.id) } - for lifetime in generics.lifetimes.iter() { + for lifetime in &generics.lifetimes { self.operation.visit_id(lifetime.lifetime.id) } } @@ -430,14 +430,14 @@ impl<'a, 'v, O: IdVisitingOperation> Visitor<'v> for IdVisitor<'a, O> { ViewPathSimple(_, _) | ViewPathGlob(_) => {} ViewPathList(_, ref paths) => { - for path in paths.iter() { + for path in paths { self.operation.visit_id(path.node.id()) } } } } ItemEnum(ref enum_definition, _) => { - for variant in enum_definition.variants.iter() { + for variant in &enum_definition.variants { self.operation.visit_id(variant.node.id) } } @@ -511,7 +511,7 @@ impl<'a, 'v, O: IdVisitingOperation> Visitor<'v> for IdVisitor<'a, O> { visit::FkFnBlock => {} } - for argument in function_declaration.inputs.iter() { + for argument in &function_declaration.inputs { self.operation.visit_id(argument.id) } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 4427a7aaf02..61ddd240abc 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -394,7 +394,7 @@ fn find_stability_generic<'a, 
let mut feature = None; let mut since = None; let mut reason = None; - for meta in metas.iter() { + for meta in metas { if meta.name().get() == "feature" { match meta.value_str() { Some(v) => feature = Some(v), @@ -496,7 +496,7 @@ pub fn find_stability(diagnostic: &SpanHandler, attrs: &[Attribute], pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P<MetaItem>]) { let mut set = HashSet::new(); - for meta in metas.iter() { + for meta in metas { let name = meta.name(); if !set.insert(name.clone()) { @@ -518,7 +518,7 @@ pub fn find_repr_attrs(diagnostic: &SpanHandler, attr: &Attribute) -> Vec<ReprAt match attr.node.value.node { ast::MetaList(ref s, ref items) if *s == "repr" => { mark_used(attr); - for item in items.iter() { + for item in items { match item.node { ast::MetaWord(ref word) => { let hint = match word.get() { diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 8adb9b24222..bd7a7783580 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -453,7 +453,7 @@ impl CodeMap { } pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> { - for fm in self.files.borrow().iter() { + for fm in &*self.files.borrow() { if filename == fm.name { return fm.clone(); } @@ -477,7 +477,7 @@ impl CodeMap { // The number of extra bytes due to multibyte chars in the FileMap let mut total_extra_bytes = 0; - for mbc in map.multibyte_chars.borrow().iter() { + for mbc in &*map.multibyte_chars.borrow() { debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); if mbc.pos < bpos { // every character is at least one byte, so we only diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 821ac8e2f89..19c4c3cabfc 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -461,7 +461,7 @@ fn highlight_lines(err: &mut EmitterWriter, elided = true; } // Print the offending lines - for &line_number in display_lines.iter() { + for &line_number in display_lines { if let Some(line) = fm.get_line(line_number) { 
try!(write!(&mut err.dst, "{}:{} {}\n", fm.name, line_number + 1, line)); @@ -550,7 +550,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, last_line_number + 1, last_line)); } } else { - for &line_number in lines.iter() { + for &line_number in lines { if let Some(line) = fm.get_line(line_number) { try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, line_number + 1, line)); diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index 1b84d93738d..39c67c60511 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -427,7 +427,7 @@ impl<'a> TraitDef<'a> { bounds.push(cx.typarambound(trait_path.clone())); // also add in any bounds from the declaration - for declared_bound in ty_param.bounds.iter() { + for declared_bound in &*ty_param.bounds { bounds.push((*declared_bound).clone()); } @@ -974,7 +974,7 @@ impl<'a> MethodDef<'a> { subpats.push(p); idents }; - for self_arg_name in self_arg_names.tail().iter() { + for self_arg_name in self_arg_names.tail() { let (p, idents) = mk_self_pat(cx, &self_arg_name[]); subpats.push(p); self_pats_idents.push(idents); diff --git a/src/libsyntax/ext/deriving/hash.rs b/src/libsyntax/ext/deriving/hash.rs index e7f546b2691..efd93226618 100644 --- a/src/libsyntax/ext/deriving/hash.rs +++ b/src/libsyntax/ext/deriving/hash.rs @@ -100,7 +100,7 @@ fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) _ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`") }; - for &FieldInfo { ref self_, span, .. } in fields.iter() { + for &FieldInfo { ref self_, span, .. 
} in fields { stmts.push(call_hash(span, self_.clone())); } diff --git a/src/libsyntax/ext/deriving/primitive.rs b/src/libsyntax/ext/deriving/primitive.rs index c694b054ba3..ae7b20f7853 100644 --- a/src/libsyntax/ext/deriving/primitive.rs +++ b/src/libsyntax/ext/deriving/primitive.rs @@ -93,7 +93,7 @@ fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure let mut arms = Vec::new(); - for variant in enum_def.variants.iter() { + for variant in &enum_def.variants { match variant.node.kind { ast::TupleVariantKind(ref args) => { if !args.is_empty() { diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 5736400313e..8b4816f5d2b 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -504,7 +504,7 @@ fn expand_item_modifiers(mut it: P<ast::Item>, fld: &mut MacroExpander) return it.expect_item(); } - for attr in modifiers.iter() { + for attr in &modifiers { let mname = attr.name(); match fld.cx.syntax_env.find(&intern(mname.get())) { @@ -552,7 +552,7 @@ fn expand_item_underscore(item: ast::Item_, fld: &mut MacroExpander) -> ast::Ite // does this attribute list contain "macro_use" ? 
fn contains_macro_use(fld: &mut MacroExpander, attrs: &[ast::Attribute]) -> bool { - for attr in attrs.iter() { + for attr in attrs { let mut is_use = attr.check_name("macro_use"); if attr.check_name("macro_escape") { fld.cx.span_warn(attr.span, "macro_escape is a deprecated synonym for macro_use"); @@ -853,7 +853,7 @@ impl<'v> Visitor<'v> for PatIdentFinder { ast::Pat { id: _, node: ast::PatIdent(_, ref path1, ref inner), span: _ } => { self.ident_accumulator.push(path1.node); // visit optional subpattern of PatIdent: - for subpat in inner.iter() { + if let Some(ref subpat) = *inner { self.visit_pat(&**subpat) } } @@ -873,7 +873,7 @@ fn pattern_bindings(pat: &ast::Pat) -> Vec<ast::Ident> { /// find the PatIdent paths in a fn fn_decl_arg_bindings(fn_decl: &ast::FnDecl) -> Vec<ast::Ident> { let mut pat_idents = PatIdentFinder{ident_accumulator:Vec::new()}; - for arg in fn_decl.inputs.iter() { + for arg in &fn_decl.inputs { pat_idents.visit_pat(&*arg.pat); } pat_idents.ident_accumulator @@ -1063,7 +1063,7 @@ fn expand_annotatable(a: Annotatable, let mut decorator_items = SmallVector::zero(); let mut new_attrs = Vec::new(); - for attr in a.attrs().iter() { + for attr in a.attrs() { let mname = attr.name(); match fld.cx.syntax_env.find(&intern(mname.get())) { @@ -1218,7 +1218,7 @@ fn expand_item_multi_modifier(mut it: Annotatable, return it } - for attr in modifiers.iter() { + for attr in &modifiers { let mname = attr.name(); match fld.cx.syntax_env.find(&intern(mname.get())) { diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 36dbf117604..16aaccb0207 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -499,7 +499,7 @@ impl<'a, 'b> Context<'a, 'b> { self.ecx.expr_ident(e.span, name))); heads.push(self.ecx.expr_addr_of(e.span, e)); } - for name in self.name_ordering.iter() { + for name in &self.name_ordering { let e = match self.names.remove(name) { Some(e) => e, None => continue @@ -706,7 +706,7 @@ pub fn 
expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, cx.ecx.span_err(cx.args[i].span, "argument never used"); } } - for (name, e) in cx.names.iter() { + for (name, e) in &cx.names { if !cx.name_types.contains_key(name) { cx.ecx.span_err(e.span, "named argument never used"); } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 0f617302c92..e9c6629be3c 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -694,7 +694,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> { fn mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> Vec<P<ast::Stmt>> { let mut ss = Vec::new(); - for tt in tts.iter() { + for tt in tts { ss.extend(mk_tt(cx, tt).into_iter()); } ss diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index e3211c7c337..ac4a149a3ed 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -209,12 +209,12 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) { match m { &TtSequence(_, ref seq) => { - for next_m in seq.tts.iter() { + for next_m in &seq.tts { n_rec(p_s, next_m, res, ret_val, idx) } } &TtDelimited(_, ref delim) => { - for next_m in delim.tts.iter() { + for next_m in &delim.tts { n_rec(p_s, next_m, res, ret_val, idx) } } @@ -239,7 +239,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) } let mut ret_val = HashMap::new(); let mut idx = 0us; - for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) } + for m in ms { n_rec(p_s, m, res, &mut ret_val, &mut idx) } ret_val } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 666281ac6b6..ac9f375e0a4 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -259,7 +259,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, _ => cx.span_bug(def.span, "wrong-structured lhs") }; - for lhs 
in lhses.iter() { + for lhs in &lhses { check_lhs_nt_follows(cx, &**lhs, def.span); } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 775cfede70d..4e76359e930 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -253,7 +253,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } fn visit_item(&mut self, i: &ast::Item) { - for attr in i.attrs.iter() { + for attr in &i.attrs { if attr.name() == "thread_local" { self.gate_feature("thread_local", i.span, "`#[thread_local]` is an experimental feature, and does not \ @@ -508,7 +508,7 @@ fn check_crate_inner<F>(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::C let mut unknown_features = Vec::new(); - for attr in krate.attrs.iter() { + for attr in &krate.attrs { if !attr.check_name("feature") { continue } @@ -519,7 +519,7 @@ fn check_crate_inner<F>(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::C expected #![feature(...)]"); } Some(list) => { - for mi in list.iter() { + for mi in list { let name = match mi.node { ast::MetaWord(ref word) => (*word).clone(), _ => { diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 926385ccd11..7269afcaea4 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -90,7 +90,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { let mut i = usize::MAX; let mut can_trim = true; let mut first = true; - for line in lines.iter() { + for line in &lines { for (j, c) in line.chars().enumerate() { if j > i || !"* \t".contains_char(c) { can_trim = false; @@ -125,7 +125,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { // one-line comments lose their prefix static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"]; - for prefix in ONLINERS.iter() { + for prefix in ONLINERS { if comment.starts_with(*prefix) { return (&comment[prefix.len()..]).to_string(); } diff --git 
a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 2cf6058a433..ca6193508fe 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1526,7 +1526,7 @@ mod test { // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) fn check_tokenization (mut string_reader: StringReader, expected: Vec<token::Token> ) { - for expected_tok in expected.iter() { + for expected_tok in &expected { assert_eq!(&string_reader.next_token().tok, expected_tok); } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 8ac5b6e5274..81803d6bd89 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -1163,7 +1163,7 @@ mod test { "impl z { fn a (self: Foo, &myarg: i32) {} }", ]; - for &src in srcs.iter() { + for &src in &srcs { let spans = get_spans_of_pat_idents(src); let Span{ lo, hi, .. } = spans[0]; assert!("self" == &src[lo.to_usize()..hi.to_usize()], diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index d99095eeba3..b7960d9e709 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2706,7 +2706,7 @@ impl<'a> Parser<'a> { match self.token { token::Eof => { let open_braces = self.open_braces.clone(); - for sp in open_braces.iter() { + for sp in &open_braces { self.span_help(*sp, "did you mean to close this delimiter?"); } // There shouldn't really be a span, but it's easier for the test runner @@ -5200,7 +5200,7 @@ impl<'a> Parser<'a> { Some(i) => { let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); - for p in included_mod_stack[i.. len].iter() { + for p in &included_mod_stack[i.. 
len] { err.push_str(&p.display().as_cow()[]); err.push_str(" -> "); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 5367ccc1357..21cd02b3851 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -593,7 +593,7 @@ impl<'a> State<'a> { { try!(self.rbox(0us, b)); let mut first = true; - for elt in elts.iter() { + for elt in elts { if first { first = false; } else { try!(self.word_space(",")); } try!(op(self, elt)); } @@ -612,7 +612,7 @@ impl<'a> State<'a> { try!(self.rbox(0us, b)); let len = elts.len(); let mut i = 0us; - for elt in elts.iter() { + for elt in elts { try!(self.maybe_print_comment(get_span(elt).hi)); try!(op(self, elt)); i += 1us; @@ -634,7 +634,7 @@ impl<'a> State<'a> { pub fn print_mod(&mut self, _mod: &ast::Mod, attrs: &[ast::Attribute]) -> IoResult<()> { try!(self.print_inner_attributes(attrs)); - for item in _mod.items.iter() { + for item in &_mod.items { try!(self.print_item(&**item)); } Ok(()) @@ -643,7 +643,7 @@ impl<'a> State<'a> { pub fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod, attrs: &[ast::Attribute]) -> IoResult<()> { try!(self.print_inner_attributes(attrs)); - for item in nmod.items.iter() { + for item in &nmod.items { try!(self.print_foreign_item(&**item)); } Ok(()) @@ -651,8 +651,8 @@ impl<'a> State<'a> { pub fn print_opt_lifetime(&mut self, lifetime: &Option<ast::Lifetime>) -> IoResult<()> { - for l in lifetime.iter() { - try!(self.print_lifetime(l)); + if let Some(l) = *lifetime { + try!(self.print_lifetime(&l)); try!(self.nbsp()); } Ok(()) @@ -799,7 +799,7 @@ impl<'a> State<'a> { ast::ItemExternCrate(ref optional_path) => { try!(self.head(&visibility_qualified(item.vis, "extern crate")[])); - for &(ref p, style) in optional_path.iter() { + if let Some((ref p, style)) = *optional_path { try!(self.print_string(p.get(), style)); try!(space(&mut self.s)); try!(word(&mut self.s, "as")); @@ -945,7 +945,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); 
try!(self.bopen()); try!(self.print_inner_attributes(&item.attrs[])); - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { ast::MethodImplItem(ref meth) => { try!(self.print_method(&**meth)); @@ -979,7 +979,7 @@ impl<'a> State<'a> { try!(self.print_where_clause(generics)); try!(word(&mut self.s, " ")); try!(self.bopen()); - for meth in methods.iter() { + for meth in methods { try!(self.print_trait_method(meth)); } try!(self.bclose(item.span)); @@ -1010,7 +1010,7 @@ impl<'a> State<'a> { if !t.bound_lifetimes.is_empty() { try!(word(&mut self.s, "for<")); let mut comma = false; - for lifetime_def in t.bound_lifetimes.iter() { + for lifetime_def in &t.bound_lifetimes { if comma { try!(self.word_space(",")) } @@ -1039,7 +1039,7 @@ impl<'a> State<'a> { variants: &[P<ast::Variant>], span: codemap::Span) -> IoResult<()> { try!(self.bopen()); - for v in variants.iter() { + for v in variants { try!(self.space_if_not_bol()); try!(self.maybe_print_comment(v.span.lo)); try!(self.print_outer_attributes(&v.node.attrs[])); @@ -1094,7 +1094,7 @@ impl<'a> State<'a> { try!(self.bopen()); try!(self.hardbreak_if_not_bol()); - for field in struct_def.fields.iter() { + for field in &struct_def.fields { match field.node.kind { ast::UnnamedField(..) 
=> panic!("unexpected unnamed field"), ast::NamedField(ident, visibility) => { @@ -1141,7 +1141,7 @@ impl<'a> State<'a> { }, ast::TtSequence(_, ref seq) => { try!(word(&mut self.s, "$(")); - for tt_elt in seq.tts.iter() { + for tt_elt in &seq.tts { try!(self.print_tt(tt_elt)); } try!(word(&mut self.s, ")")); @@ -1278,7 +1278,7 @@ impl<'a> State<'a> { pub fn print_outer_attributes(&mut self, attrs: &[ast::Attribute]) -> IoResult<()> { let mut count = 0us; - for attr in attrs.iter() { + for attr in attrs { match attr.node.style { ast::AttrOuter => { try!(self.print_attribute(attr)); @@ -1296,7 +1296,7 @@ impl<'a> State<'a> { pub fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) -> IoResult<()> { let mut count = 0us; - for attr in attrs.iter() { + for attr in attrs { match attr.node.style { ast::AttrInner => { try!(self.print_attribute(attr)); @@ -1395,7 +1395,7 @@ impl<'a> State<'a> { try!(self.print_inner_attributes(attrs)); - for st in blk.stmts.iter() { + for st in &blk.stmts { try!(self.print_stmt(&**st)); } match blk.expr { @@ -1691,8 +1691,8 @@ impl<'a> State<'a> { try!(self.print_if_let(&**pat, &**expr, &** blk, elseopt.as_ref().map(|e| &**e))); } ast::ExprWhile(ref test, ref blk, opt_ident) => { - for ident in opt_ident.iter() { - try!(self.print_ident(*ident)); + if let Some(ident) = opt_ident { + try!(self.print_ident(ident)); try!(self.word_space(":")); } try!(self.head("while")); @@ -1701,8 +1701,8 @@ impl<'a> State<'a> { try!(self.print_block(&**blk)); } ast::ExprWhileLet(ref pat, ref expr, ref blk, opt_ident) => { - for ident in opt_ident.iter() { - try!(self.print_ident(*ident)); + if let Some(ident) = opt_ident { + try!(self.print_ident(ident)); try!(self.word_space(":")); } try!(self.head("while let")); @@ -1714,8 +1714,8 @@ impl<'a> State<'a> { try!(self.print_block(&**blk)); } ast::ExprForLoop(ref pat, ref iter, ref blk, opt_ident) => { - for ident in opt_ident.iter() { - try!(self.print_ident(*ident)); + if let Some(ident) = opt_ident 
{ + try!(self.print_ident(ident)); try!(self.word_space(":")); } try!(self.head("for")); @@ -1727,8 +1727,8 @@ impl<'a> State<'a> { try!(self.print_block(&**blk)); } ast::ExprLoop(ref blk, opt_ident) => { - for ident in opt_ident.iter() { - try!(self.print_ident(*ident)); + if let Some(ident) = opt_ident { + try!(self.print_ident(ident)); try!(self.word_space(":")); } try!(self.head("loop")); @@ -1742,7 +1742,7 @@ impl<'a> State<'a> { try!(self.print_expr(&**expr)); try!(space(&mut self.s)); try!(self.bopen()); - for arm in arms.iter() { + for arm in arms { try!(self.print_arm(arm)); } try!(self.bclose_(expr.span, indent_unit)); @@ -1825,16 +1825,16 @@ impl<'a> State<'a> { ast::ExprBreak(opt_ident) => { try!(word(&mut self.s, "break")); try!(space(&mut self.s)); - for ident in opt_ident.iter() { - try!(self.print_ident(*ident)); + if let Some(ident) = opt_ident { + try!(self.print_ident(ident)); try!(space(&mut self.s)); } } ast::ExprAgain(opt_ident) => { try!(word(&mut self.s, "continue")); try!(space(&mut self.s)); - for ident in opt_ident.iter() { - try!(self.print_ident(*ident)); + if let Some(ident) = opt_ident { + try!(self.print_ident(ident)); try!(space(&mut self.s)) } } @@ -1991,7 +1991,7 @@ impl<'a> State<'a> { } let mut first = true; - for segment in path.segments.iter() { + for segment in &path.segments { if first { first = false } else { @@ -2040,7 +2040,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "<")); let mut comma = false; - for lifetime in data.lifetimes.iter() { + for lifetime in &data.lifetimes { if comma { try!(self.word_space(",")) } @@ -2059,7 +2059,7 @@ impl<'a> State<'a> { comma = true; } - for binding in data.bindings.iter() { + for binding in &*data.bindings { if comma { try!(self.word_space(",")) } @@ -2193,7 +2193,7 @@ impl<'a> State<'a> { try!(self.commasep(Inconsistent, &before[], |s, p| s.print_pat(&**p))); - for p in slice.iter() { + if let Some(ref p) = *slice { if !before.is_empty() { try!(self.word_space(",")); } 
try!(self.print_pat(&**p)); match **p { @@ -2224,7 +2224,7 @@ impl<'a> State<'a> { try!(self.ibox(0us)); try!(self.print_outer_attributes(&arm.attrs[])); let mut first = true; - for p in arm.pats.iter() { + for p in &arm.pats { if first { first = false; } else { @@ -2309,7 +2309,7 @@ impl<'a> State<'a> { // self type and the args all in the same box. try!(self.rbox(0us, Inconsistent)); let mut first = true; - for &explicit_self in opt_explicit_self.iter() { + if let Some(explicit_self) = opt_explicit_self { let m = match explicit_self { &ast::SelfStatic => ast::MutImmutable, _ => match decl.inputs[0].pat.node { @@ -2327,7 +2327,7 @@ impl<'a> State<'a> { &decl.inputs[1..] }; - for arg in args.iter() { + for arg in args { if first { first = false; } else { try!(self.word_space(",")); } try!(self.print_arg(arg)); } @@ -2397,7 +2397,7 @@ impl<'a> State<'a> { if !bounds.is_empty() { try!(word(&mut self.s, prefix)); let mut first = true; - for bound in bounds.iter() { + for bound in bounds { try!(self.nbsp()); if first { first = false; @@ -2437,7 +2437,7 @@ impl<'a> State<'a> { { try!(self.print_lifetime(&lifetime.lifetime)); let mut sep = ":"; - for v in lifetime.bounds.iter() { + for v in &lifetime.bounds { try!(word(&mut self.s, sep)); try!(self.print_lifetime(v)); sep = "+"; @@ -2792,7 +2792,7 @@ impl<'a> State<'a> { } ast::LitBinary(ref v) => { let mut escaped: String = String::new(); - for &ch in v.iter() { + for &ch in &**v { ascii::escape_default(ch as u8, |ch| escaped.push(ch as char)); } @@ -2842,7 +2842,7 @@ impl<'a> State<'a> { } comments::Isolated => { try!(self.hardbreak_if_not_bol()); - for line in cmnt.lines.iter() { + for line in &cmnt.lines { // Don't print empty lines because they will end up as trailing // whitespace if !line.is_empty() { @@ -2859,7 +2859,7 @@ impl<'a> State<'a> { hardbreak(&mut self.s) } else { try!(self.ibox(0us)); - for line in cmnt.lines.iter() { + for line in &cmnt.lines { if !line.is_empty() { try!(word(&mut self.s, &line[])); } 
diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 1b35b1b04a3..51144267519 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -40,7 +40,7 @@ impl<T: Eq + Hash<Hasher> + Clone + 'static> Interner<T> { pub fn prefill(init: &[T]) -> Interner<T> { let rv = Interner::new(); - for v in init.iter() { + for v in init { rv.intern((*v).clone()); } rv @@ -158,7 +158,7 @@ impl StrInterner { pub fn prefill(init: &[&str]) -> StrInterner { let rv = StrInterner::new(); - for &v in init.iter() { rv.intern(v); } + for &v in init { rv.intern(v); } rv } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index eb906788aa7..bd84306fe17 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -159,13 +159,13 @@ pub fn walk_inlined_item<'v,V>(visitor: &mut V, item: &'v InlinedItem) pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) { visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID); - for attr in krate.attrs.iter() { + for attr in &krate.attrs { visitor.visit_attribute(attr); } } pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod) { - for item in module.items.iter() { + for item in &module.items { visitor.visit_item(&**item) } } @@ -179,7 +179,7 @@ pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local) { pub fn walk_lifetime_def<'v, V: Visitor<'v>>(visitor: &mut V, lifetime_def: &'v LifetimeDef) { visitor.visit_name(lifetime_def.lifetime.span, lifetime_def.lifetime.name); - for bound in lifetime_def.bounds.iter() { + for bound in &lifetime_def.bounds { visitor.visit_lifetime_bound(bound); } } @@ -239,7 +239,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { visitor.visit_path(path, item.id); } ViewPathList(ref prefix, ref list) => { - for id in list.iter() { + for id in list { match id.node { PathListIdent { name, .. 
} => { visitor.visit_ident(id.span, name); @@ -270,7 +270,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { visitor.visit_mod(module, item.span, item.id) } ItemForeignMod(ref foreign_module) => { - for foreign_item in foreign_module.items.iter() { + for foreign_item in &foreign_module.items { visitor.visit_foreign_item(&**foreign_item) } } @@ -293,7 +293,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { None => () } visitor.visit_ty(&**typ); - for impl_item in impl_items.iter() { + for impl_item in impl_items { match *impl_item { MethodImplItem(ref method) => { walk_method_helper(visitor, &**method) @@ -315,13 +315,13 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { ItemTrait(_, ref generics, ref bounds, ref methods) => { visitor.visit_generics(generics); walk_ty_param_bounds_helper(visitor, bounds); - for method in methods.iter() { + for method in methods { visitor.visit_trait_item(method) } } ItemMac(ref mac) => visitor.visit_mac(mac), } - for attr in item.attrs.iter() { + for attr in &item.attrs { visitor.visit_attribute(attr); } } @@ -329,7 +329,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { pub fn walk_enum_def<'v, V: Visitor<'v>>(visitor: &mut V, enum_definition: &'v EnumDef, generics: &'v Generics) { - for variant in enum_definition.variants.iter() { + for variant in &enum_definition.variants { visitor.visit_variant(&**variant, generics); } } @@ -341,7 +341,7 @@ pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, match variant.node.kind { TupleVariantKind(ref variant_arguments) => { - for variant_argument in variant_arguments.iter() { + for variant_argument in variant_arguments { visitor.visit_ty(&*variant_argument.ty) } } @@ -356,7 +356,7 @@ pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, Some(ref expr) => visitor.visit_expr(&**expr), None => () } - for attr in variant.node.attrs.iter() { + for attr in &variant.node.attrs { 
visitor.visit_attribute(attr); } } @@ -385,12 +385,12 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { visitor.visit_ty(&*mutable_type.ty) } TyTup(ref tuple_element_types) => { - for tuple_element_type in tuple_element_types.iter() { + for tuple_element_type in tuple_element_types { visitor.visit_ty(&**tuple_element_type) } } TyBareFn(ref function_declaration) => { - for argument in function_declaration.decl.inputs.iter() { + for argument in &function_declaration.decl.inputs { visitor.visit_ty(&*argument.ty) } walk_fn_ret_ty(visitor, &function_declaration.decl.output); @@ -422,13 +422,13 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { pub fn walk_lifetime_decls_helper<'v, V: Visitor<'v>>(visitor: &mut V, lifetimes: &'v Vec<LifetimeDef>) { - for l in lifetimes.iter() { + for l in lifetimes { visitor.visit_lifetime_def(l); } } pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) { - for segment in path.segments.iter() { + for segment in &path.segments { visitor.visit_path_segment(path.span, segment); } } @@ -453,21 +453,21 @@ pub fn walk_path_parameters<'v, V: Visitor<'v>>(visitor: &mut V, path_parameters: &'v PathParameters) { match *path_parameters { ast::AngleBracketedParameters(ref data) => { - for typ in data.types.iter() { + for typ in &*data.types { visitor.visit_ty(&**typ); } - for lifetime in data.lifetimes.iter() { + for lifetime in &data.lifetimes { visitor.visit_lifetime_ref(lifetime); } - for binding in data.bindings.iter() { + for binding in &*data.bindings { visitor.visit_assoc_type_binding(&**binding); } } ast::ParenthesizedParameters(ref data) => { - for typ in data.inputs.iter() { + for typ in &data.inputs { visitor.visit_ty(&**typ); } - for typ in data.output.iter() { + if let Some(ref typ) = data.output { visitor.visit_ty(&**typ); } } @@ -484,20 +484,20 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { match pattern.node { PatEnum(ref path, ref children) => { 
visitor.visit_path(path, pattern.id); - for children in children.iter() { - for child in children.iter() { - visitor.visit_pat(&**child) + if let Some(ref children) = *children { + for child in children { + visitor.visit_pat(&*child) } } } PatStruct(ref path, ref fields, _) => { visitor.visit_path(path, pattern.id); - for field in fields.iter() { + for field in fields { visitor.visit_pat(&*field.node.pat) } } PatTup(ref tuple_elements) => { - for tuple_element in tuple_elements.iter() { + for tuple_element in tuple_elements { visitor.visit_pat(&**tuple_element) } } @@ -519,13 +519,13 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { } PatWild(_) => (), PatVec(ref prepattern, ref slice_pattern, ref postpatterns) => { - for prepattern in prepattern.iter() { + for prepattern in prepattern { visitor.visit_pat(&**prepattern) } - for slice_pattern in slice_pattern.iter() { + if let Some(ref slice_pattern) = *slice_pattern { visitor.visit_pat(&**slice_pattern) } - for postpattern in postpatterns.iter() { + for postpattern in postpatterns { visitor.visit_pat(&**postpattern) } } @@ -545,14 +545,14 @@ pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, ForeignItemStatic(ref typ, _) => visitor.visit_ty(&**typ), } - for attr in foreign_item.attrs.iter() { + for attr in &foreign_item.attrs { visitor.visit_attribute(attr); } } pub fn walk_ty_param_bounds_helper<'v, V: Visitor<'v>>(visitor: &mut V, bounds: &'v OwnedSlice<TyParamBound>) { - for bound in bounds.iter() { + for bound in &**bounds { visitor.visit_ty_param_bound(bound) } } @@ -576,11 +576,11 @@ pub fn walk_ty_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v TyParam) { } pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics) { - for type_parameter in generics.ty_params.iter() { + for type_parameter in &*generics.ty_params { walk_ty_param(visitor, type_parameter); } walk_lifetime_decls_helper(visitor, &generics.lifetimes); - for predicate in 
generics.where_clause.predicates.iter() { + for predicate in &generics.where_clause.predicates { match predicate { &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ref bounded_ty, ref bounds, @@ -593,7 +593,7 @@ pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics ..}) => { visitor.visit_lifetime_ref(lifetime); - for bound in bounds.iter() { + for bound in bounds { visitor.visit_lifetime_ref(bound); } } @@ -615,7 +615,7 @@ pub fn walk_fn_ret_ty<'v, V: Visitor<'v>>(visitor: &mut V, ret_ty: &'v FunctionR } pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: &'v FnDecl) { - for argument in function_declaration.inputs.iter() { + for argument in &function_declaration.inputs { visitor.visit_pat(&*argument.pat); visitor.visit_ty(&*argument.ty) } @@ -635,7 +635,7 @@ pub fn walk_method_helper<'v, V: Visitor<'v>>(visitor: &mut V, method: &'v Metho &**body, method.span, method.id); - for attr in method.attrs.iter() { + for attr in &method.attrs { visitor.visit_attribute(attr); } @@ -673,12 +673,12 @@ pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V, pub fn walk_ty_method<'v, V: Visitor<'v>>(visitor: &mut V, method_type: &'v TypeMethod) { visitor.visit_ident(method_type.span, method_type.ident); visitor.visit_explicit_self(&method_type.explicit_self); - for argument_type in method_type.decl.inputs.iter() { + for argument_type in &method_type.decl.inputs { visitor.visit_ty(&*argument_type.ty) } visitor.visit_generics(&method_type.generics); walk_fn_ret_ty(visitor, &method_type.decl.output); - for attr in method_type.attrs.iter() { + for attr in &method_type.attrs { visitor.visit_attribute(attr); } } @@ -695,7 +695,7 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_method: &'v Tr pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V, struct_definition: &'v StructDef) { - for field in struct_definition.fields.iter() { + for field in &struct_definition.fields { 
visitor.visit_struct_field(field) } } @@ -708,13 +708,13 @@ pub fn walk_struct_field<'v, V: Visitor<'v>>(visitor: &mut V, visitor.visit_ty(&*struct_field.node.ty); - for attr in struct_field.node.attrs.iter() { + for attr in &struct_field.node.attrs { visitor.visit_attribute(attr); } } pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block) { - for statement in block.stmts.iter() { + for statement in &block.stmts { visitor.visit_stmt(&**statement) } walk_expr_opt(visitor, &block.expr) @@ -746,7 +746,7 @@ pub fn walk_expr_opt<'v, V: Visitor<'v>>(visitor: &mut V, } pub fn walk_exprs<'v, V: Visitor<'v>>(visitor: &mut V, expressions: &'v [P<Expr>]) { - for expression in expressions.iter() { + for expression in expressions { visitor.visit_expr(&**expression) } } @@ -770,25 +770,25 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { } ExprStruct(ref path, ref fields, ref optional_base) => { visitor.visit_path(path, expression.id); - for field in fields.iter() { + for field in fields { visitor.visit_expr(&*field.expr) } walk_expr_opt(visitor, optional_base) } ExprTup(ref subexpressions) => { - for subexpression in subexpressions.iter() { + for subexpression in subexpressions { visitor.visit_expr(&**subexpression) } } ExprCall(ref callee_expression, ref arguments) => { - for argument in arguments.iter() { + for argument in arguments { visitor.visit_expr(&**argument) } visitor.visit_expr(&**callee_expression) } ExprMethodCall(_, ref types, ref arguments) => { walk_exprs(visitor, arguments.as_slice()); - for typ in types.iter() { + for typ in types { visitor.visit_ty(&**typ) } } @@ -832,7 +832,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { ExprLoop(ref block, _) => visitor.visit_block(&**block), ExprMatch(ref subexpression, ref arms, _) => { visitor.visit_expr(&**subexpression); - for arm in arms.iter() { + for arm in arms { visitor.visit_arm(arm) } } @@ -881,11 +881,11 @@ pub fn walk_expr<'v, 
V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr(&**subexpression) } ExprInlineAsm(ref ia) => { - for input in ia.inputs.iter() { + for input in &ia.inputs { let (_, ref input) = *input; visitor.visit_expr(&**input) } - for output in ia.outputs.iter() { + for output in &ia.outputs { let (_, ref output, _) = *output; visitor.visit_expr(&**output) } @@ -896,12 +896,12 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { } pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) { - for pattern in arm.pats.iter() { + for pattern in &arm.pats { visitor.visit_pat(&**pattern) } walk_expr_opt(visitor, &arm.guard); visitor.visit_expr(&*arm.body); - for attr in arm.attrs.iter() { + for attr in &arm.attrs { visitor.visit_attribute(attr); } } diff --git a/src/libterm/terminfo/parm.rs b/src/libterm/terminfo/parm.rs index 0b51a976c0e..4173744ab4b 100644 --- a/src/libterm/terminfo/parm.rs +++ b/src/libterm/terminfo/parm.rs @@ -109,7 +109,7 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables) *dst = (*src).clone(); } - for &c in cap.iter() { + for &c in cap { let cur = c as char; let mut old_state = state; match state { @@ -613,7 +613,7 @@ mod test { } let caps = ["%d", "%c", "%s", "%Pa", "%l", "%!", "%~"]; - for &cap in caps.iter() { + for &cap in &caps { let res = get_res("", cap, &[], vars); assert!(res.is_err(), "Op {} succeeded incorrectly with 0 stack entries", cap); @@ -627,7 +627,7 @@ mod test { "Op {} failed with 1 stack entry: {}", cap, res.err().unwrap()); } let caps = ["%+", "%-", "%*", "%/", "%m", "%&", "%|", "%A", "%O"]; - for &cap in caps.iter() { + for &cap in &caps { let res = expand(cap.as_bytes(), &[], vars); assert!(res.is_err(), "Binop {} succeeded incorrectly with 0 stack entries", cap); @@ -648,7 +648,7 @@ mod test { #[test] fn test_comparison_ops() { let v = [('<', [1u8, 0u8, 0u8]), ('=', [0u8, 1u8, 0u8]), ('>', [0u8, 0u8, 1u8])]; - for &(op, bs) in v.iter() { + for &(op, bs) 
in &v { let s = format!("%{{1}}%{{2}}%{}%d", op); let res = expand(s.as_bytes(), &[], &mut Variables::new()); assert!(res.is_ok(), res.err().unwrap()); diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs index 1e84c0462d6..93a59194a4c 100644 --- a/src/libterm/terminfo/searcher.rs +++ b/src/libterm/terminfo/searcher.rs @@ -58,7 +58,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option<Box<Path>> { }; // Look for the terminal in all of the search directories - for p in dirs_to_search.iter() { + for p in &dirs_to_search { if p.exists() { let f = first_char.to_string(); let newp = p.join_many(&[&f[], term]); diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index f22c58c54a6..a985c049fec 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -576,7 +576,7 @@ impl<T: Writer> ConsoleTestState<T> { try!(self.write_plain("\nfailures:\n")); let mut failures = Vec::new(); let mut fail_out = String::new(); - for &(ref f, ref stdout) in self.failures.iter() { + for &(ref f, ref stdout) in &self.failures { failures.push(f.name.to_string()); if stdout.len() > 0 { fail_out.push_str(format!("---- {} stdout ----\n\t", @@ -593,7 +593,7 @@ impl<T: Writer> ConsoleTestState<T> { try!(self.write_plain("\nfailures:\n")); failures.sort(); - for name in failures.iter() { + for name in &failures { try!(self.write_plain(format!(" {}\n", name.as_slice()).as_slice())); } @@ -652,7 +652,7 @@ pub fn run_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn> ) -> old_io: TrMetrics(mm) => { let tname = test.name.as_slice(); let MetricMap(mm) = mm; - for (k,v) in mm.iter() { + for (k,v) in &mm { st.metrics .insert_metric(format!("{}.{}", tname, @@ -1299,7 +1299,7 @@ mod tests { { fn testfn() { } let mut tests = Vec::new(); - for name in names.iter() { + for name in &names { let test = TestDescAndFn { desc: TestDesc { name: DynTestName((*name).clone()), diff --git a/src/libtest/stats.rs b/src/libtest/stats.rs index 237acbd7b65..54f06fd0662 100644 --- 
a/src/libtest/stats.rs +++ b/src/libtest/stats.rs @@ -167,7 +167,7 @@ impl<T: Float + FromPrimitive> Stats<T> for [T] { fn sum(&self) -> T { let mut partials = vec![]; - for &x in self.iter() { + for &x in self { let mut x = x; let mut j = 0; // This inner loop applies `hi`/`lo` summation to each @@ -223,7 +223,7 @@ impl<T: Float + FromPrimitive> Stats<T> for [T] { } else { let mean = self.mean(); let mut v: T = Float::zero(); - for s in self.iter() { + for s in self { let x = *s - mean; v = v + x*x; } diff --git a/src/libunicode/normalize.rs b/src/libunicode/normalize.rs index c6f86ccd9d4..e35634ac465 100644 --- a/src/libunicode/normalize.rs +++ b/src/libunicode/normalize.rs @@ -52,7 +52,7 @@ fn d<F>(c: char, i: &mut F, k: bool) where F: FnMut(char) { // First check the canonical decompositions match bsearch_table(c, canonical_table) { Some(canon) => { - for x in canon.iter() { + for x in canon { d(*x, i, k); } return; @@ -66,7 +66,7 @@ fn d<F>(c: char, i: &mut F, k: bool) where F: FnMut(char) { // Then check the compatibility decompositions match bsearch_table(c, compatibility_table) { Some(compat) => { - for x in compat.iter() { + for x in compat { d(*x, i, k); } return; diff --git a/src/rustbook/book.rs b/src/rustbook/book.rs index 2707a6346c8..e7739300790 100644 --- a/src/rustbook/book.rs +++ b/src/rustbook/book.rs @@ -49,7 +49,7 @@ impl<'a> Iterator for BookItems<'a> { let cur = self.cur_items.get(self.cur_idx).unwrap(); let mut section = "".to_string(); - for &(_, idx) in self.stack.iter() { + for &(_, idx) in &self.stack { section.push_str(&(idx + 1).to_string()[]); section.push('.'); } diff --git a/src/test/auxiliary/cci_nested_lib.rs b/src/test/auxiliary/cci_nested_lib.rs index 8494917c615..587af956c77 100644 --- a/src/test/auxiliary/cci_nested_lib.rs +++ b/src/test/auxiliary/cci_nested_lib.rs @@ -35,7 +35,7 @@ pub fn alist_get<A:Clone + 'static, -> B { let eq_fn = lst.eq_fn; let data = lst.data.borrow(); - for entry in (*data).iter() { + for entry in 
&(*data) { if eq_fn(entry.key.clone(), k.clone()) { return entry.value.clone(); } diff --git a/src/test/bench/shootout-chameneos-redux.rs b/src/test/bench/shootout-chameneos-redux.rs index 15a63e153b9..0835dd9a08e 100644 --- a/src/test/bench/shootout-chameneos-redux.rs +++ b/src/test/bench/shootout-chameneos-redux.rs @@ -47,8 +47,8 @@ use std::thread::Thread; fn print_complements() { let all = [Blue, Red, Yellow]; - for aa in all.iter() { - for bb in all.iter() { + for aa in &all { + for bb in &all { println!("{:?} + {:?} -> {:?}", *aa, *bb, transform(*aa, *bb)); } } @@ -80,7 +80,7 @@ struct CreatureInfo { fn show_color_list(set: Vec<Color>) -> String { let mut out = String::new(); - for col in set.iter() { + for col in &set { out.push(' '); out.push_str(format!("{:?}", col).as_slice()); } diff --git a/src/test/bench/shootout-fasta-redux.rs b/src/test/bench/shootout-fasta-redux.rs index f32e98ef17d..5386fc0419d 100644 --- a/src/test/bench/shootout-fasta-redux.rs +++ b/src/test/bench/shootout-fasta-redux.rs @@ -92,7 +92,7 @@ static HOMO_SAPIENS: [AminoAcid;4] = [ fn sum_and_scale(a: &'static [AminoAcid]) -> Vec<AminoAcid> { let mut result = Vec::new(); let mut p = 0f32; - for a_i in a.iter() { + for a_i in a { let mut a_i = *a_i; p += a_i.p; a_i.p = p * LOOKUP_SCALE; @@ -180,7 +180,7 @@ impl<'a, W: Writer> RandomFasta<'a, W> { fn nextc(&mut self) -> u8 { let r = self.rng(1.0); - for a in self.lookup.iter() { + for a in &self.lookup[] { if a.p >= r { return a.c; } diff --git a/src/test/bench/shootout-k-nucleotide-pipes.rs b/src/test/bench/shootout-k-nucleotide-pipes.rs index 2da276b722e..ad8e6551a03 100644 --- a/src/test/bench/shootout-k-nucleotide-pipes.rs +++ b/src/test/bench/shootout-k-nucleotide-pipes.rs @@ -56,14 +56,14 @@ fn sort_and_fmt(mm: &HashMap<Vec<u8> , uint>, total: uint) -> String { let mut pairs = Vec::new(); // map -> [(k,%)] - for (key, &val) in mm.iter() { + for (key, &val) in mm { pairs.push(((*key).clone(), pct(val, total))); } let pairs_sorted = 
sortKV(pairs); let mut buffer = String::new(); - for &(ref k, v) in pairs_sorted.iter() { + for &(ref k, v) in &pairs_sorted { buffer.push_str(format!("{:?} {:0.3}\n", k.to_ascii_uppercase(), v).as_slice()); diff --git a/src/test/bench/shootout-k-nucleotide.rs b/src/test/bench/shootout-k-nucleotide.rs index 2c3ade63f06..a7a47ff07ce 100644 --- a/src/test/bench/shootout-k-nucleotide.rs +++ b/src/test/bench/shootout-k-nucleotide.rs @@ -264,7 +264,7 @@ fn print_frequencies(frequencies: &Table, frame: uint) { vector.as_mut_slice().sort(); let mut total_count = 0; - for &(count, _) in vector.iter() { + for &(count, _) in &vector { total_count += count; } diff --git a/src/test/bench/shootout-mandelbrot.rs b/src/test/bench/shootout-mandelbrot.rs index 4a9c5a91dcf..754b891eb63 100644 --- a/src/test/bench/shootout-mandelbrot.rs +++ b/src/test/bench/shootout-mandelbrot.rs @@ -133,7 +133,7 @@ fn mandelbrot<W: old_io::Writer>(w: uint, mut out: W) -> old_io::IoResult<()> { (i + 1) * chunk_size }; - for &init_i in vec_init_i[start..end].iter() { + for &init_i in &vec_init_i[start..end] { write_line(init_i, init_r_slice, &mut res); } diff --git a/src/test/bench/shootout-meteor.rs b/src/test/bench/shootout-meteor.rs index d44948e4ed2..80a21402fe1 100644 --- a/src/test/bench/shootout-meteor.rs +++ b/src/test/bench/shootout-meteor.rs @@ -135,7 +135,7 @@ fn transform(piece: Vec<(i32, i32)> , all: bool) -> Vec<Vec<(i32, i32)>> { // dx) is on the board. 
fn mask(dy: i32, dx: i32, id: usize, p: &Vec<(i32, i32)>) -> Option<u64> { let mut m = 1 << (50 + id); - for &(y, x) in p.iter() { + for &(y, x) in p { let x = x + dx + (y + (dy % 2)) / 2; if x < 0 || x > 4 {return None;} let y = y + dy; @@ -184,7 +184,7 @@ fn is_board_unfeasible(board: u64, masks: &Vec<Vec<Vec<u64>>>) -> bool { if board & 1 << i != 0 { continue; } for (cur_id, pos_masks) in masks_at.iter().enumerate() { if board & 1 << (50 + cur_id) != 0 { continue; } - for &cur_m in pos_masks.iter() { + for &cur_m in pos_masks { if cur_m & board != 0 { continue; } coverable |= cur_m; // if every coordinates can be covered and every diff --git a/src/test/bench/shootout-nbody.rs b/src/test/bench/shootout-nbody.rs index b2161000322..ca43216c662 100644 --- a/src/test/bench/shootout-nbody.rs +++ b/src/test/bench/shootout-nbody.rs @@ -158,7 +158,7 @@ fn offset_momentum(bodies: &mut [Planet;N_BODIES]) { let mut px = 0.0; let mut py = 0.0; let mut pz = 0.0; - for bi in bodies.iter() { + for bi in &*bodies { px += bi.vx * bi.mass; py += bi.vy * bi.mass; pz += bi.vz * bi.mass; diff --git a/src/test/compile-fail/borrowck-for-loop-correct-cmt-for-pattern.rs b/src/test/compile-fail/borrowck-for-loop-correct-cmt-for-pattern.rs index ad77953bdd8..492fd4a2c84 100644 --- a/src/test/compile-fail/borrowck-for-loop-correct-cmt-for-pattern.rs +++ b/src/test/compile-fail/borrowck-for-loop-correct-cmt-for-pattern.rs @@ -25,7 +25,7 @@ fn main() { let f = Foo { a: [box 3, box 4, box 5], }; - for &a in f.a.iter() { //~ ERROR cannot move out + for &a in &f.a { //~ ERROR cannot move out } let x = Some(box 1); diff --git a/src/test/compile-fail/borrowck-for-loop-head-linkage.rs b/src/test/compile-fail/borrowck-for-loop-head-linkage.rs index d2f99ea696a..043ea0bf00e 100644 --- a/src/test/compile-fail/borrowck-for-loop-head-linkage.rs +++ b/src/test/compile-fail/borrowck-for-loop-head-linkage.rs @@ -12,7 +12,7 @@ use std::iter::repeat; fn main() { let mut vector = vec![1us, 2]; - for &x in 
vector.iter() { + for &x in &vector { let cap = vector.capacity(); vector.extend(repeat(0)); //~ ERROR cannot borrow vector[1us] = 5us; //~ ERROR cannot borrow diff --git a/src/test/compile-fail/borrowck-insert-during-each.rs b/src/test/compile-fail/borrowck-insert-during-each.rs index d729af844cb..2c634865983 100644 --- a/src/test/compile-fail/borrowck-insert-during-each.rs +++ b/src/test/compile-fail/borrowck-insert-during-each.rs @@ -17,7 +17,7 @@ struct Foo { impl Foo { pub fn foo<F>(&mut self, mut fun: F) where F: FnMut(&isize) { - for f in self.n.iter() { + for f in &self.n { fun(f); } } diff --git a/src/test/compile-fail/drop-with-active-borrows-1.rs b/src/test/compile-fail/drop-with-active-borrows-1.rs index 5f5fbd0f348..dc8deb04833 100644 --- a/src/test/compile-fail/drop-with-active-borrows-1.rs +++ b/src/test/compile-fail/drop-with-active-borrows-1.rs @@ -12,7 +12,7 @@ fn main() { let a = "".to_string(); let b: Vec<&str> = a.lines().collect(); drop(a); //~ ERROR cannot move out of `a` because it is borrowed - for s in b.iter() { + for s in &b { println!("{}", *s); } } diff --git a/src/test/compile-fail/issue-15480.rs b/src/test/compile-fail/issue-15480.rs index 1e4476e563b..30f58f909a0 100644 --- a/src/test/compile-fail/issue-15480.rs +++ b/src/test/compile-fail/issue-15480.rs @@ -14,7 +14,7 @@ fn main() { //~^ ERROR borrowed value does not live long enough ]; - for &&x in v.iter() { + for &&x in &v { println!("{}", x + 3); } } diff --git a/src/test/compile-fail/issue-18400.rs b/src/test/compile-fail/issue-18400.rs index f3b2b3d5667..015f1fa603a 100644 --- a/src/test/compile-fail/issue-18400.rs +++ b/src/test/compile-fail/issue-18400.rs @@ -22,7 +22,7 @@ impl<'a, T, S> Set<&'a [T]> for S where } fn set(&mut self, bits: &[T]) { - for &bit in bits.iter() { + for &bit in bits { self.set(bit) } } diff --git a/src/test/compile-fail/issue-2149.rs b/src/test/compile-fail/issue-2149.rs index 691660f8971..998b9587b96 100644 --- 
a/src/test/compile-fail/issue-2149.rs +++ b/src/test/compile-fail/issue-2149.rs @@ -15,7 +15,7 @@ trait vec_monad<A> { impl<A> vec_monad<A> for Vec<A> { fn bind<B, F>(&self, mut f: F) where F: FnMut(A) -> Vec<B> { let mut r = panic!(); - for elt in self.iter() { r = r + f(*elt); } + for elt in self { r = r + f(*elt); } //~^ ERROR the type of this value must be known } } diff --git a/src/test/compile-fail/issue-2150.rs b/src/test/compile-fail/issue-2150.rs index 68195985eec..a2711d532d2 100644 --- a/src/test/compile-fail/issue-2150.rs +++ b/src/test/compile-fail/issue-2150.rs @@ -16,7 +16,7 @@ fn fail_len(v: Vec<isize> ) -> usize { let mut i = 3; panic!(); - for x in v.iter() { i += 1us; } + for x in &v { i += 1us; } //~^ ERROR: unreachable statement return i; } diff --git a/src/test/compile-fail/issue-5100.rs b/src/test/compile-fail/issue-5100.rs index b051abbc7ff..474274c93fd 100644 --- a/src/test/compile-fail/issue-5100.rs +++ b/src/test/compile-fail/issue-5100.rs @@ -64,7 +64,7 @@ fn main() { ('c', 'd'), ('e', 'f')]; - for &(x,y) in v.iter() {} // should be OK + for &(x,y) in &v {} // should be OK // Make sure none of the errors above were fatal let x: char = true; //~ ERROR mismatched types diff --git a/src/test/compile-fail/issue-7573.rs b/src/test/compile-fail/issue-7573.rs index 897afb1c102..c15c556f5d6 100644 --- a/src/test/compile-fail/issue-7573.rs +++ b/src/test/compile-fail/issue-7573.rs @@ -32,7 +32,7 @@ pub fn remove_package_from_database() { }; list_database(push_id); - for l in lines_to_use.iter() { + for l in &lines_to_use { println!("{}", l.local_path); } @@ -41,7 +41,7 @@ pub fn remove_package_from_database() { pub fn list_database<F>(mut f: F) where F: FnMut(&CrateId) { let stuff = ["foo", "bar"]; - for l in stuff.iter() { + for l in &stuff { f(&CrateId::new(*l)); } } diff --git a/src/test/debuginfo/destructured-for-loop-variable.rs b/src/test/debuginfo/destructured-for-loop-variable.rs index 08062ce8966..163771a2362 100644 --- 
a/src/test/debuginfo/destructured-for-loop-variable.rs +++ b/src/test/debuginfo/destructured-for-loop-variable.rs @@ -170,14 +170,14 @@ fn main() { z: true }; - for &Struct { x, y, z } in [s].iter() { + for &Struct { x, y, z } in &[s] { zzz(); // #break } let tuple: (i8, u8, i16, u16, i32, u32, i64, u64, f32, f64) = (0x6f, 0x70, -113, 114, -115, 116, -117, 118, 119.5, 120.5); - for &(_i8, _u8, _i16, _u16, _i32, _u32, _i64, _u64, _f32, _f64) in [tuple].iter() { + for &(_i8, _u8, _i16, _u16, _i32, _u32, _i64, _u64, _f32, _f64) in &[tuple] { zzz(); // #break } diff --git a/src/test/debuginfo/lexical-scope-in-for-loop.rs b/src/test/debuginfo/lexical-scope-in-for-loop.rs index 1fa54e47163..fe5983cbb6a 100644 --- a/src/test/debuginfo/lexical-scope-in-for-loop.rs +++ b/src/test/debuginfo/lexical-scope-in-for-loop.rs @@ -94,7 +94,7 @@ fn main() { let x = 1000000; // wan meeeljen doollaars! - for &x in range.iter() { + for &x in &range { zzz(); // #break sentinel(); diff --git a/src/test/debuginfo/unreachable-locals.rs b/src/test/debuginfo/unreachable-locals.rs index 70f8b1ccd96..8bcb54af8ba 100644 --- a/src/test/debuginfo/unreachable-locals.rs +++ b/src/test/debuginfo/unreachable-locals.rs @@ -26,7 +26,7 @@ fn after_return() { match (20i32, 'c') { (a, ref b) => {} } - for a in [111i32].iter() {} + for a in &[111i32] {} } fn after_panic() { @@ -36,7 +36,7 @@ fn after_panic() { match (20i32, 'c') { (a, ref b) => {} } - for a in [111i32].iter() {} + for a in &[111i32] {} } fn after_diverging_function() { @@ -46,7 +46,7 @@ fn after_diverging_function() { match (20i32, 'c') { (a, ref b) => {} } - for a in [111i32].iter() {} + for a in &[111i32] {} } fn after_break() { @@ -57,7 +57,7 @@ fn after_break() { match (20i32, 'c') { (a, ref b) => {} } - for a in [111i32].iter() {} + for a in &[111i32] {} } } @@ -69,7 +69,7 @@ fn after_continue() { match (20i32, 'c') { (a, ref b) => {} } - for a in [111i32].iter() {} + for a in &[111i32] {} } } diff --git 
a/src/test/pretty/block-comment-wchar.pp b/src/test/pretty/block-comment-wchar.pp index fbdd15b6060..5a55cb4e561 100644 --- a/src/test/pretty/block-comment-wchar.pp +++ b/src/test/pretty/block-comment-wchar.pp @@ -109,7 +109,7 @@ fn main() { '\u2000', '\u2001', '\u2002', '\u2003', '\u2004', '\u2005', '\u2006', '\u2007', '\u2008', '\u2009', '\u200A', '\u2028', '\u2029', '\u202F', '\u205F', '\u3000']; - for c in chars.iter() { + for c in &chars { let ws = c.is_whitespace(); println!("{} {}" , c , ws); } diff --git a/src/test/pretty/block-comment-wchar.rs b/src/test/pretty/block-comment-wchar.rs index cc5640ce82a..c82bdcd8dcb 100644 --- a/src/test/pretty/block-comment-wchar.rs +++ b/src/test/pretty/block-comment-wchar.rs @@ -103,7 +103,7 @@ fn main() { '\u2000', '\u2001', '\u2002', '\u2003', '\u2004', '\u2005', '\u2006', '\u2007', '\u2008', '\u2009', '\u200A', '\u2028', '\u2029', '\u202F', '\u205F', '\u3000']; - for c in chars.iter() { + for c in &chars { let ws = c.is_whitespace(); println!("{} {}", c , ws); } diff --git a/src/test/pretty/for-comment.rs b/src/test/pretty/for-comment.rs index 2318e783b69..0f2a667e11c 100644 --- a/src/test/pretty/for-comment.rs +++ b/src/test/pretty/for-comment.rs @@ -12,7 +12,7 @@ fn f(v: &[int]) -> int { let mut n = 0; - for e in v.iter() { + for e in v { n = *e; // This comment once triggered pretty printer bug } diff --git a/src/test/run-pass/auto-loop.rs b/src/test/run-pass/auto-loop.rs index e5f4d078749..2cc7451e138 100644 --- a/src/test/run-pass/auto-loop.rs +++ b/src/test/run-pass/auto-loop.rs @@ -11,7 +11,7 @@ pub fn main() { let mut sum = 0; let xs = vec!(1, 2, 3, 4, 5); - for x in xs.iter() { + for x in &xs { sum += *x; } assert_eq!(sum, 15); diff --git a/src/test/run-pass/block-arg.rs b/src/test/run-pass/block-arg.rs index d017a0dbf9a..2f530331a2b 100644 --- a/src/test/run-pass/block-arg.rs +++ b/src/test/run-pass/block-arg.rs @@ -13,7 +13,7 @@ pub fn main() { let v = vec!(-1.0f64, 0.0, 1.0, 2.0, 3.0); // Statement form 
does not require parentheses: - for i in v.iter() { + for i in &v { println!("{}", *i); } diff --git a/src/test/run-pass/block-iter-1.rs b/src/test/run-pass/block-iter-1.rs index b5bd4d90c2e..d5d26f42ef0 100644 --- a/src/test/run-pass/block-iter-1.rs +++ b/src/test/run-pass/block-iter-1.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -fn iter_vec<T, F>(v: Vec<T> , mut f: F) where F: FnMut(&T) { for x in v.iter() { f(x); } } +fn iter_vec<T, F>(v: Vec<T> , mut f: F) where F: FnMut(&T) { for x in &v { f(x); } } pub fn main() { let v = vec![1i32, 2, 3, 4, 5, 6, 7]; diff --git a/src/test/run-pass/block-iter-2.rs b/src/test/run-pass/block-iter-2.rs index 348d9df6e7e..8c079ca4b07 100644 --- a/src/test/run-pass/block-iter-2.rs +++ b/src/test/run-pass/block-iter-2.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -fn iter_vec<T, F>(v: Vec<T>, mut f: F) where F: FnMut(&T) { for x in v.iter() { f(x); } } +fn iter_vec<T, F>(v: Vec<T>, mut f: F) where F: FnMut(&T) { for x in &v { f(x); } } pub fn main() { let v = vec![1i32, 2, 3, 4, 5]; diff --git a/src/test/run-pass/borrowck-mut-vec-as-imm-slice.rs b/src/test/run-pass/borrowck-mut-vec-as-imm-slice.rs index 05dffe91680..94c7c2b13ce 100644 --- a/src/test/run-pass/borrowck-mut-vec-as-imm-slice.rs +++ b/src/test/run-pass/borrowck-mut-vec-as-imm-slice.rs @@ -11,7 +11,7 @@ fn want_slice(v: &[int]) -> int { let mut sum = 0; - for i in v.iter() { sum += *i; } + for i in v { sum += *i; } sum } diff --git a/src/test/run-pass/break.rs b/src/test/run-pass/break.rs index bcfb8f6f914..6498c4b461d 100644 --- a/src/test/run-pass/break.rs +++ b/src/test/run-pass/break.rs @@ -15,7 +15,7 @@ pub fn main() { loop { i += 1; if i == 20 { break; } } assert_eq!(i, 20); let xs = [1, 2, 3, 4, 5, 6]; - for x in xs.iter() { + for x in &xs { if *x == 3 { break; } assert!((*x <= 3)); } i = 0; @@ -26,7 +26,7 @@ pub fn 
main() { if i >= 10 { break; } } let ys = vec!(1, 2, 3, 4, 5, 6); - for x in ys.iter() { + for x in &ys { if *x % 2 == 0 { continue; } assert!((*x % 2 != 0)); } diff --git a/src/test/run-pass/capture-clauses-boxed-closures.rs b/src/test/run-pass/capture-clauses-boxed-closures.rs index 6e8ed4fd5a1..5e696566bfa 100644 --- a/src/test/run-pass/capture-clauses-boxed-closures.rs +++ b/src/test/run-pass/capture-clauses-boxed-closures.rs @@ -9,7 +9,7 @@ // except according to those terms. fn each<T, F>(x: &[T], mut f: F) where F: FnMut(&T) { - for val in x.iter() { + for val in x { f(val) } } diff --git a/src/test/run-pass/capture-clauses-unboxed-closures.rs b/src/test/run-pass/capture-clauses-unboxed-closures.rs index cd40e2a7843..de1196e10d8 100644 --- a/src/test/run-pass/capture-clauses-unboxed-closures.rs +++ b/src/test/run-pass/capture-clauses-unboxed-closures.rs @@ -11,7 +11,7 @@ #![feature(unboxed_closures)] fn each<'a,T,F:FnMut(&'a T)>(x: &'a [T], mut f: F) { - for val in x.iter() { + for val in x { f(val) } } diff --git a/src/test/run-pass/cleanup-rvalue-for-scope.rs b/src/test/run-pass/cleanup-rvalue-for-scope.rs index 8969cca2610..f69a0332cc4 100644 --- a/src/test/run-pass/cleanup-rvalue-for-scope.rs +++ b/src/test/run-pass/cleanup-rvalue-for-scope.rs @@ -61,7 +61,7 @@ impl Drop for AddFlags { pub fn main() { // The array containing [AddFlags] should not be dropped until // after the for loop: - for x in [AddFlags(1)].iter() { + for x in &[AddFlags(1)] { check_flags(0); } check_flags(1); diff --git a/src/test/run-pass/coerce-reborrow-imm-vec-arg.rs b/src/test/run-pass/coerce-reborrow-imm-vec-arg.rs index f6abfda03e3..b6b30e2fe9b 100644 --- a/src/test/run-pass/coerce-reborrow-imm-vec-arg.rs +++ b/src/test/run-pass/coerce-reborrow-imm-vec-arg.rs @@ -10,7 +10,7 @@ fn sum(x: &[int]) -> int { let mut sum = 0; - for y in x.iter() { sum += *y; } + for y in x { sum += *y; } return sum; } diff --git a/src/test/run-pass/const-vec-of-fns.rs 
b/src/test/run-pass/const-vec-of-fns.rs index f21f7d1903c..0914402256d 100644 --- a/src/test/run-pass/const-vec-of-fns.rs +++ b/src/test/run-pass/const-vec-of-fns.rs @@ -22,7 +22,7 @@ static mut closures: &'static mut [S<fn()>] = &mut [S(f as fn()), S(f as fn())]; pub fn main() { unsafe { - for &bare_fn in bare_fns.iter() { bare_fn() } + for &bare_fn in bare_fns { bare_fn() } for closure in closures.iter_mut() { let S(ref mut closure) = *closure; (*closure)() diff --git a/src/test/run-pass/explicit-self-closures.rs b/src/test/run-pass/explicit-self-closures.rs index c7e5681c0c1..ef9dc377bc7 100644 --- a/src/test/run-pass/explicit-self-closures.rs +++ b/src/test/run-pass/explicit-self-closures.rs @@ -16,7 +16,7 @@ struct Box { impl Box { pub fn set_many(&mut self, xs: &[uint]) { - for x in xs.iter() { self.x = *x; } + for x in xs { self.x = *x; } } } diff --git a/src/test/run-pass/fn-pattern-expected-type-2.rs b/src/test/run-pass/fn-pattern-expected-type-2.rs index 8590c8b90b2..4e2c8facaf8 100644 --- a/src/test/run-pass/fn-pattern-expected-type-2.rs +++ b/src/test/run-pass/fn-pattern-expected-type-2.rs @@ -10,7 +10,7 @@ pub fn main() { let v : &[(int,int)] = &[ (1, 2), (3, 4), (5, 6) ]; - for &(x, y) in v.iter() { + for &(x, y) in v { println!("{}", y); println!("{}", x); } diff --git a/src/test/run-pass/for-destruct.rs b/src/test/run-pass/for-destruct.rs index 7cc8b22e061..7a9b8a45b2a 100644 --- a/src/test/run-pass/for-destruct.rs +++ b/src/test/run-pass/for-destruct.rs @@ -11,7 +11,7 @@ struct Pair { x: int, y: int } pub fn main() { - for elt in (vec!(Pair {x: 10, y: 20}, Pair {x: 30, y: 0})).iter() { + for elt in &(vec!(Pair {x: 10, y: 20}, Pair {x: 30, y: 0})) { assert_eq!(elt.x + elt.y, 30); } } diff --git a/src/test/run-pass/for-loop-goofiness.rs b/src/test/run-pass/for-loop-goofiness.rs index ae509dc0862..7754751120e 100644 --- a/src/test/run-pass/for-loop-goofiness.rs +++ b/src/test/run-pass/for-loop-goofiness.rs @@ -17,7 +17,7 @@ type Iterator = int; pub fn 
main() { let x = [ 3, 3, 3 ]; - for i in x.iter() { + for i in &x { assert_eq!(*i, 3); } } diff --git a/src/test/run-pass/for-loop-panic.rs b/src/test/run-pass/for-loop-panic.rs index d157da3139f..d2de1ed8c7e 100644 --- a/src/test/run-pass/for-loop-panic.rs +++ b/src/test/run-pass/for-loop-panic.rs @@ -9,4 +9,4 @@ // except according to those terms. -pub fn main() { let x: Vec<int> = Vec::new(); for _ in x.iter() { panic!("moop"); } } +pub fn main() { let x: Vec<int> = Vec::new(); for _ in &x { panic!("moop"); } } diff --git a/src/test/run-pass/foreach-external-iterators-break.rs b/src/test/run-pass/foreach-external-iterators-break.rs index 9cbb4f4107a..e9e8c3f0929 100644 --- a/src/test/run-pass/foreach-external-iterators-break.rs +++ b/src/test/run-pass/foreach-external-iterators-break.rs @@ -11,7 +11,7 @@ pub fn main() { let x = [1; 100]; let mut y = 0; - for i in x.iter() { + for i in &x[] { if y > 10 { break; } diff --git a/src/test/run-pass/foreach-external-iterators-hashmap-break-restart.rs b/src/test/run-pass/foreach-external-iterators-hashmap-break-restart.rs index 891cbf6696b..a4988bf016c 100644 --- a/src/test/run-pass/foreach-external-iterators-hashmap-break-restart.rs +++ b/src/test/run-pass/foreach-external-iterators-hashmap-break-restart.rs @@ -19,7 +19,7 @@ use std::collections::HashMap; pub fn main() { let mut h = HashMap::new(); let kvs = [(1, 10), (2, 20), (3, 30)]; - for &(k,v) in kvs.iter() { + for &(k,v) in &kvs { h.insert(k,v); } let mut x = 0; diff --git a/src/test/run-pass/foreach-external-iterators-hashmap.rs b/src/test/run-pass/foreach-external-iterators-hashmap.rs index 1878997de5a..ed4328d94fe 100644 --- a/src/test/run-pass/foreach-external-iterators-hashmap.rs +++ b/src/test/run-pass/foreach-external-iterators-hashmap.rs @@ -15,12 +15,12 @@ use std::collections::HashMap; pub fn main() { let mut h = HashMap::new(); let kvs = [(1, 10), (2, 20), (3, 30)]; - for &(k,v) in kvs.iter() { + for &(k,v) in &kvs { h.insert(k,v); } let mut x = 0; 
let mut y = 0; - for (&k,&v) in h.iter() { + for (&k,&v) in &h { x += k; y += v; } diff --git a/src/test/run-pass/foreach-external-iterators-nested.rs b/src/test/run-pass/foreach-external-iterators-nested.rs index 75471991620..6acfbc95317 100644 --- a/src/test/run-pass/foreach-external-iterators-nested.rs +++ b/src/test/run-pass/foreach-external-iterators-nested.rs @@ -13,8 +13,8 @@ pub fn main() { let y = [2; 100]; let mut p = 0; let mut q = 0; - for i in x.iter() { - for j in y.iter() { + for i in &x[] { + for j in &y[] { p += *j; } q += *i + p; diff --git a/src/test/run-pass/foreach-external-iterators.rs b/src/test/run-pass/foreach-external-iterators.rs index ef4692b2b51..2f154be659d 100644 --- a/src/test/run-pass/foreach-external-iterators.rs +++ b/src/test/run-pass/foreach-external-iterators.rs @@ -11,7 +11,7 @@ pub fn main() { let x = [1; 100]; let mut y = 0; - for i in x.iter() { + for i in &x[] { y += *i } assert!(y == 100); diff --git a/src/test/run-pass/generic-static-methods.rs b/src/test/run-pass/generic-static-methods.rs index 90a6349385d..7f84efcdd5d 100644 --- a/src/test/run-pass/generic-static-methods.rs +++ b/src/test/run-pass/generic-static-methods.rs @@ -16,7 +16,7 @@ trait vec_utils<T> { impl<T> vec_utils<T> for Vec<T> { fn map_<U, F>(x: &Vec<T> , mut f: F) -> Vec<U> where F: FnMut(&T) -> U { let mut r = Vec::new(); - for elt in x.iter() { + for elt in x { r.push(f(elt)); } r diff --git a/src/test/run-pass/hashmap-memory.rs b/src/test/run-pass/hashmap-memory.rs index 0e82ad43782..651ac632439 100644 --- a/src/test/run-pass/hashmap-memory.rs +++ b/src/test/run-pass/hashmap-memory.rs @@ -35,7 +35,7 @@ mod map_reduce { enum ctrl_proto { find_reducer(Vec<u8>, Sender<int>), mapper_done, } fn start_mappers(ctrl: Sender<ctrl_proto>, inputs: Vec<String>) { - for i in inputs.iter() { + for i in &inputs { let ctrl = ctrl.clone(); let i = i.clone(); Thread::spawn(move|| map_task(ctrl.clone(), i.clone()) ); diff --git a/src/test/run-pass/issue-10396.rs 
b/src/test/run-pass/issue-10396.rs index 7095812ce4b..b487608d4e6 100644 --- a/src/test/run-pass/issue-10396.rs +++ b/src/test/run-pass/issue-10396.rs @@ -14,7 +14,7 @@ enum Foo<'s> { } fn f(arr: &[&Foo]) { - for &f in arr.iter() { + for &f in arr { println!("{:?}", f); } } diff --git a/src/test/run-pass/issue-17068.rs b/src/test/run-pass/issue-17068.rs index dfbe7406229..a0e6f2c9be9 100644 --- a/src/test/run-pass/issue-17068.rs +++ b/src/test/run-pass/issue-17068.rs @@ -11,7 +11,7 @@ // Test that regionck creates the right region links in the pattern // binding of a for loop fn foo<'a>(v: &'a [uint]) -> &'a uint { - for &ref x in v.iter() { return x; } + for &ref x in v { return x; } unreachable!() } diff --git a/src/test/run-pass/issue-2904.rs b/src/test/run-pass/issue-2904.rs index 8a67d84cb64..b55f3691358 100644 --- a/src/test/run-pass/issue-2904.rs +++ b/src/test/run-pass/issue-2904.rs @@ -66,12 +66,12 @@ fn read_board_grid<rdr:'static + old_io::Reader>(mut input: rdr) let mut line = [0; 10]; input.read(&mut line); let mut row = Vec::new(); - for c in line.iter() { + for c in &line { row.push(square_from_char(*c as char)) } grid.push(row); let width = grid[0].len(); - for row in grid.iter() { assert!(row.len() == width) } + for row in &grid { assert!(row.len() == width) } grid } diff --git a/src/test/run-pass/issue-3389.rs b/src/test/run-pass/issue-3389.rs index 8f6b8137199..26558bdd30c 100644 --- a/src/test/run-pass/issue-3389.rs +++ b/src/test/run-pass/issue-3389.rs @@ -15,7 +15,7 @@ struct trie_node { } fn print_str_vector(vector: Vec<String> ) { - for string in vector.iter() { + for string in &vector { println!("{}", *string); } } diff --git a/src/test/run-pass/issue-3563-2.rs b/src/test/run-pass/issue-3563-2.rs index e688b95f873..2cf29296b85 100644 --- a/src/test/run-pass/issue-3563-2.rs +++ b/src/test/run-pass/issue-3563-2.rs @@ -11,7 +11,7 @@ trait Canvas { fn add_point(&self, point: &int); fn add_points(&self, shapes: &[int]) { - for pt in 
shapes.iter() { + for pt in shapes { self.add_point(pt) } } diff --git a/src/test/run-pass/issue-3563-3.rs b/src/test/run-pass/issue-3563-3.rs index 426a8ccf7e7..5d02a1b2bd2 100644 --- a/src/test/run-pass/issue-3563-3.rs +++ b/src/test/run-pass/issue-3563-3.rs @@ -122,7 +122,7 @@ trait Canvas { // Unlike interfaces traits support default implementations. // Got an ICE as soon as I added this method. fn add_points(&mut self, shapes: &[Point]) { - for pt in shapes.iter() {self.add_point(*pt)}; + for pt in shapes {self.add_point(*pt)}; } } diff --git a/src/test/run-pass/issue-4241.rs b/src/test/run-pass/issue-4241.rs index 19622cb88e3..89cf2f69b34 100644 --- a/src/test/run-pass/issue-4241.rs +++ b/src/test/run-pass/issue-4241.rs @@ -100,7 +100,7 @@ priv fn cmd_to_string(cmd: ~[String]) -> String { let mut res = "*".to_string(); res.push_str(cmd.len().to_string()); res.push_str("\r\n"); - for s in cmd.iter() { + for s in &cmd { res.push_str(["$".to_string(), s.len().to_string(), "\r\n".to_string(), (*s).clone(), "\r\n".to_string()].concat() ); } diff --git a/src/test/run-pass/issue-4542.rs b/src/test/run-pass/issue-4542.rs index 024ee8c6dfe..ae72de50d26 100644 --- a/src/test/run-pass/issue-4542.rs +++ b/src/test/run-pass/issue-4542.rs @@ -11,7 +11,7 @@ use std::os; pub fn main() { - for arg in os::args().iter() { + for arg in &os::args() { match (*arg).clone() { _s => { } } diff --git a/src/test/run-pass/issue-5666.rs b/src/test/run-pass/issue-5666.rs index e53f4c86923..4f304e3b436 100644 --- a/src/test/run-pass/issue-5666.rs +++ b/src/test/run-pass/issue-5666.rs @@ -31,7 +31,7 @@ pub fn main() { let bubbles = box Dog{name: "bubbles".to_string()}; let barker = [snoopy as Box<Barks>, bubbles as Box<Barks>]; - for pup in barker.iter() { + for pup in &barker { println!("{}", pup.bark()); } } diff --git a/src/test/run-pass/issue-5688.rs b/src/test/run-pass/issue-5688.rs index a4f39884846..9612c4bf181 100644 --- a/src/test/run-pass/issue-5688.rs +++ 
b/src/test/run-pass/issue-5688.rs @@ -23,7 +23,7 @@ struct X { vec: &'static [int] } static V: &'static [X] = &[X { vec: &[1, 2, 3] }]; pub fn main() { - for &v in V.iter() { + for &v in V { println!("{:?}", v.vec); } } diff --git a/src/test/run-pass/linear-for-loop.rs b/src/test/run-pass/linear-for-loop.rs index eda14222e91..22a29279a67 100644 --- a/src/test/run-pass/linear-for-loop.rs +++ b/src/test/run-pass/linear-for-loop.rs @@ -13,7 +13,7 @@ pub fn main() { let x = vec!(1, 2, 3); let mut y = 0; - for i in x.iter() { println!("{}", *i); y += *i; } + for i in &x { println!("{}", *i); y += *i; } println!("{}", y); assert_eq!(y, 6); let s = "hello there".to_string(); diff --git a/src/test/run-pass/loop-label-shadowing.rs b/src/test/run-pass/loop-label-shadowing.rs index cfe51fe7758..686a9a002ce 100644 --- a/src/test/run-pass/loop-label-shadowing.rs +++ b/src/test/run-pass/loop-label-shadowing.rs @@ -12,7 +12,7 @@ fn main() { let mut foo = Vec::new(); - 'foo: for i in [1, 2, 3].iter() { + 'foo: for i in &[1, 2, 3] { foo.push(i); } } diff --git a/src/test/run-pass/loop-scope.rs b/src/test/run-pass/loop-scope.rs index 1dc3700194c..88711a46059 100644 --- a/src/test/run-pass/loop-scope.rs +++ b/src/test/run-pass/loop-scope.rs @@ -11,6 +11,6 @@ pub fn main() { let x = vec!(10, 20, 30); let mut sum = 0; - for x in x.iter() { sum += *x; } + for x in &x { sum += *x; } assert_eq!(sum, 60); } diff --git a/src/test/run-pass/monad.rs b/src/test/run-pass/monad.rs index acd8078b1f4..457c0a35fd7 100644 --- a/src/test/run-pass/monad.rs +++ b/src/test/run-pass/monad.rs @@ -17,7 +17,7 @@ trait vec_monad<A> { impl<A> vec_monad<A> for Vec<A> { fn bind<B, F>(&self, mut f: F) -> Vec<B> where F: FnMut(&A) -> Vec<B> { let mut r = Vec::new(); - for elt in self.iter() { + for elt in self { r.extend(f(elt).into_iter()); } r diff --git a/src/test/run-pass/mutability-inherits-through-fixed-length-vec.rs b/src/test/run-pass/mutability-inherits-through-fixed-length-vec.rs index 
7d6bbbe4240..36a41896bcd 100644 --- a/src/test/run-pass/mutability-inherits-through-fixed-length-vec.rs +++ b/src/test/run-pass/mutability-inherits-through-fixed-length-vec.rs @@ -17,7 +17,7 @@ fn test1() { fn test2() { let mut ints = [0; 32]; for i in ints.iter_mut() { *i += 22; } - for i in ints.iter() { assert!(*i == 22); } + for i in &ints { assert!(*i == 22); } } pub fn main() { diff --git a/src/test/run-pass/overloaded-index-assoc-list.rs b/src/test/run-pass/overloaded-index-assoc-list.rs index 7e3ff198c43..0064748e883 100644 --- a/src/test/run-pass/overloaded-index-assoc-list.rs +++ b/src/test/run-pass/overloaded-index-assoc-list.rs @@ -32,7 +32,7 @@ impl<K: PartialEq + std::fmt::Debug, V:Clone> Index<K> for AssociationList<K,V> type Output = V; fn index<'a>(&'a self, index: &K) -> &'a V { - for pair in self.pairs.iter() { + for pair in &self.pairs { if pair.key == *index { return &pair.value } diff --git a/src/test/run-pass/packed-struct-vec.rs b/src/test/run-pass/packed-struct-vec.rs index 847688ce045..ede94acd934 100644 --- a/src/test/run-pass/packed-struct-vec.rs +++ b/src/test/run-pass/packed-struct-vec.rs @@ -28,7 +28,7 @@ pub fn main() { assert_eq!(foos[i], Foo { bar: 1, baz: 2}); } - for &foo in foos.iter() { + for &foo in &foos { assert_eq!(foo, Foo { bar: 1, baz: 2 }); } } diff --git a/src/test/run-pass/regions-mock-tcx.rs b/src/test/run-pass/regions-mock-tcx.rs index 34ff7acfca4..be7db25201a 100644 --- a/src/test/run-pass/regions-mock-tcx.rs +++ b/src/test/run-pass/regions-mock-tcx.rs @@ -66,7 +66,7 @@ impl<'tcx,'ast> TypeContext<'tcx, 'ast> { } fn add_type(&mut self, s: TypeStructure<'tcx>) -> Type<'tcx> { - for &ty in self.types.iter() { + for &ty in &self.types { if *ty == s { return ty; } diff --git a/src/test/run-pass/repeated-vector-syntax.rs b/src/test/run-pass/repeated-vector-syntax.rs index 6048e0d8673..6a1384ff933 100644 --- a/src/test/run-pass/repeated-vector-syntax.rs +++ b/src/test/run-pass/repeated-vector-syntax.rs @@ -13,7 +13,7 @@ 
pub fn main() { let y = [ 0; 1 ]; print!("["); - for xi in x.iter() { + for xi in &x[] { print!("{:?}, ", &xi[]); } println!("]"); diff --git a/src/test/run-pass/shadow.rs b/src/test/run-pass/shadow.rs index dbe73d1b94a..6e03c1e4a80 100644 --- a/src/test/run-pass/shadow.rs +++ b/src/test/run-pass/shadow.rs @@ -15,7 +15,7 @@ fn foo(c: Vec<int> ) { match t::none::<int> { t::some::<int>(_) => { - for _i in c.iter() { + for _i in &c { println!("{}", a); let a = 17; b.push(a); diff --git a/src/test/run-pass/static-impl.rs b/src/test/run-pass/static-impl.rs index 44d77e440d1..ff37ccb81ed 100644 --- a/src/test/run-pass/static-impl.rs +++ b/src/test/run-pass/static-impl.rs @@ -47,10 +47,10 @@ trait vec_utils<T> { impl<T> vec_utils<T> for Vec<T> { fn length_(&self) -> uint { self.len() } - fn iter_<F>(&self, mut f: F) where F: FnMut(&T) { for x in self.iter() { f(x); } } + fn iter_<F>(&self, mut f: F) where F: FnMut(&T) { for x in self { f(x); } } fn map_<U, F>(&self, mut f: F) -> Vec<U> where F: FnMut(&T) -> U { let mut r = Vec::new(); - for elt in self.iter() { + for elt in self { r.push(f(elt)); } r diff --git a/src/test/run-pass/task-comm-3.rs b/src/test/run-pass/task-comm-3.rs index 306cc0ffcef..4aec3d608ac 100644 --- a/src/test/run-pass/task-comm-3.rs +++ b/src/test/run-pass/task-comm-3.rs @@ -51,7 +51,7 @@ fn test00() { // Read from spawned tasks... 
let mut sum = 0; - for _r in results.iter() { + for _r in &results { i = 0; while i < number_of_messages { let value = rx.recv().unwrap(); diff --git a/src/test/run-pass/trait-bounds-in-arc.rs b/src/test/run-pass/trait-bounds-in-arc.rs index 0089646d0a1..0b650d97e4f 100644 --- a/src/test/run-pass/trait-bounds-in-arc.rs +++ b/src/test/run-pass/trait-bounds-in-arc.rs @@ -97,20 +97,20 @@ pub fn main() { fn check_legs(arc: Arc<Vec<Box<Pet+Sync+Send>>>) { let mut legs = 0; - for pet in arc.iter() { + for pet in &*arc { legs += pet.num_legs(); } assert!(legs == 12); } fn check_names(arc: Arc<Vec<Box<Pet+Sync+Send>>>) { - for pet in arc.iter() { + for pet in &*arc { pet.name(box |name| { assert!(name.as_bytes()[0] == 'a' as u8 && name.as_bytes()[1] == 'l' as u8); }) } } fn check_pedigree(arc: Arc<Vec<Box<Pet+Sync+Send>>>) { - for pet in arc.iter() { + for pet in &*arc { assert!(pet.of_good_pedigree()); } } diff --git a/src/test/run-pass/trait-generic.rs b/src/test/run-pass/trait-generic.rs index e79b22f70cf..5f4b18df6e1 100644 --- a/src/test/run-pass/trait-generic.rs +++ b/src/test/run-pass/trait-generic.rs @@ -29,7 +29,7 @@ trait map<T> { impl<T> map<T> for Vec<T> { fn map<U, F>(&self, mut f: F) -> Vec<U> where F: FnMut(&T) -> U { let mut r = Vec::new(); - for i in self.iter() { + for i in self { r.push(f(i)); } r diff --git a/src/test/run-pass/vector-sort-panic-safe.rs b/src/test/run-pass/vector-sort-panic-safe.rs index aaefbc42d70..9d83c0b0079 100644 --- a/src/test/run-pass/vector-sort-panic-safe.rs +++ b/src/test/run-pass/vector-sort-panic-safe.rs @@ -73,7 +73,7 @@ pub fn main() { // ... and then panic on each and every single one. for panic_countdown in 0..count { // refresh the counters. - for c in drop_counts.iter() { + for c in &drop_counts { c.store(0, Ordering::Relaxed); } |
