| author | Niko Matsakis <niko@alum.mit.edu> | 2012-09-19 16:55:01 -0700 |
|---|---|---|
| committer | Niko Matsakis <niko@alum.mit.edu> | 2012-09-19 17:03:01 -0700 |
| commit | cfed923600e2f7ad34241501200d595abccdeb54 (patch) | |
| tree | d382eb144026703d9abee0e6a99b87b34e9bd138 | |
| parent | 1c39f1968c77a3d42b0fdb30a36cff4d94a17da2 (diff) | |
| download | rust-cfed923600e2f7ad34241501200d595abccdeb54.tar.gz rust-cfed923600e2f7ad34241501200d595abccdeb54.zip | |
demode the each() method on vec and other iterables.
110 files changed, 483 insertions, 456 deletions
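In substance, this commit changes the `each()` callback on `BaseIter` (and the iterables that implement it: vectors, `DVec`, `DList`, `Option`, and so on) to pass each element by borrowed pointer (`&A`) rather than through the old argument-mode mechanism, so every call site now dereferences the element explicitly. A condensed before/after, excerpted from the `src/libcore/iter.rs` and `src/cargo/cargo.rs` hunks below (pre-1.0 Rust syntax as it stood in 2012; it will not compile on a modern toolchain):

```diff
--- a/src/libcore/iter.rs
+++ b/src/libcore/iter.rs
 trait BaseIter<A> {
-    pure fn each(blk: fn(A) -> bool);
+    pure fn each(blk: fn(v: &A) -> bool);
     pure fn size_hint() -> Option<uint>;
 }

--- a/src/cargo/cargo.rs
+++ b/src/cargo/cargo.rs
 for m.each |item| {
-    match attr::get_meta_item_value_str(item) {
+    match attr::get_meta_item_value_str(*item) {
         Some(value) => {
-            let name = attr::get_meta_item_name(item);
+            let name = attr::get_meta_item_name(*item);
```

The same mechanical adjustment repeats across the 110 files touched here: `fn(A) -> bool` callbacks become `fn(v: &A) -> bool`, and each use of the bound variable gains a `*` (or a `ref` binding, as in the `Option` implementation of `EACH`). "Demoding" in the commit title refers to moving these callbacks off the old parameter-mode system onto explicit borrowed pointers.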
diff --git a/src/cargo/cargo.rs b/src/cargo/cargo.rs index 07e9400a133..11ba5257f8d 100644 --- a/src/cargo/cargo.rs +++ b/src/cargo/cargo.rs @@ -325,9 +325,9 @@ fn load_crate(filename: &Path) -> Option<crate> { let mut attr_from = ~""; for m.each |item| { - match attr::get_meta_item_value_str(item) { + match attr::get_meta_item_value_str(*item) { Some(value) => { - let name = attr::get_meta_item_name(item); + let name = attr::get_meta_item_name(*item); match name { ~"vers" => attr_vers = value, @@ -530,7 +530,7 @@ fn load_one_source_package(src: source, p: map::HashMap<~str, json::Json>) { match p.find(~"tags") { Some(json::List(js)) => { for (*js).each |j| { - match j { + match *j { json::String(j) => vec::grow(tags, 1u, *j), _ => () } @@ -602,7 +602,7 @@ fn load_source_packages(c: &cargo, src: source) { match json::from_str(result::get(pkgstr)) { Ok(json::List(js)) => { for (*js).each |j| { - match j { + match *j { json::Dict(p) => { load_one_source_package(src, p); } @@ -715,7 +715,7 @@ fn configure(opts: options) -> cargo { fn for_each_package(c: &cargo, b: fn(source, package)) { for c.sources.each_value |v| { for v.packages.each |p| { - b(v, p); + b(v, *p); } } } @@ -773,7 +773,7 @@ fn install_one_crate(c: &cargo, path: &Path, cf: &Path) { !str::starts_with(option::get(ct.filename()), ~"lib")) { debug!(" bin: %s", ct.to_str()); - install_to_dir(ct, &c.bindir); + install_to_dir(*ct, &c.bindir); if c.opts.mode == system_mode { // FIXME (#2662): Put this file in PATH / symlink it so it can // be used as a generic executable @@ -781,7 +781,7 @@ fn install_one_crate(c: &cargo, path: &Path, cf: &Path) { } } else { debug!(" lib: %s", ct.to_str()); - install_to_dir(ct, &c.libdir); + install_to_dir(*ct, &c.libdir); } } } @@ -814,7 +814,7 @@ fn install_source(c: &cargo, path: &Path) { } for cratefiles.each |cf| { - match load_crate(&cf) { + match load_crate(cf) { None => loop, Some(crate) => { for crate.deps.each |query| { @@ -823,15 +823,15 @@ fn install_source(c: &cargo, path: &Path) { // condition") let wd = get_temp_workdir(c); - install_query(c, &wd, query); + install_query(c, &wd, *query); } os::change_dir(path); if c.opts.test { - test_one_crate(c, path, &cf); + test_one_crate(c, path, cf); } - install_one_crate(c, path, &cf); + install_one_crate(c, path, cf); } } } @@ -915,7 +915,7 @@ fn install_uuid(c: &cargo, wd: &Path, uuid: ~str) { } error(~"found multiple packages:"); for ps.each |elt| { - let (sname,p) = copy elt; + let (sname,p) = copy *elt; info(~" " + sname + ~"/" + p.uuid + ~" (" + p.name + ~")"); } } @@ -939,7 +939,7 @@ fn install_named(c: &cargo, wd: &Path, name: ~str) { } error(~"found multiple packages:"); for ps.each |elt| { - let (sname,p) = copy elt; + let (sname,p) = copy *elt; info(~" " + sname + ~"/" + p.uuid + ~" (" + p.name + ~")"); } } @@ -949,7 +949,7 @@ fn install_uuid_specific(c: &cargo, wd: &Path, src: ~str, uuid: ~str) { Some(s) => { for s.packages.each |p| { if p.uuid == uuid { - install_package(c, src, wd, p); + install_package(c, src, wd, *p); return; } } @@ -964,7 +964,7 @@ fn install_named_specific(c: &cargo, wd: &Path, src: ~str, name: ~str) { Some(s) => { for s.packages.each |p| { if p.name == name { - install_package(c, src, wd, p); + install_package(c, src, wd, *p); return; } } @@ -1002,22 +1002,22 @@ fn cmd_uninstall(c: &cargo) { if is_uuid(target) { for os::list_dir(lib).each |file| { - match str::find_str(file, ~"-" + target + ~"-") { - Some(_) => if !try_uninstall(&lib.push(file)) { return }, + match str::find_str(*file, ~"-" + target + ~"-") { + 
Some(_) => if !try_uninstall(&lib.push(*file)) { return }, None => () } } error(~"can't find package with uuid: " + target); } else { for os::list_dir(lib).each |file| { - match str::find_str(file, ~"lib" + target + ~"-") { - Some(_) => if !try_uninstall(&lib.push(file)) { return }, + match str::find_str(*file, ~"lib" + target + ~"-") { + Some(_) => if !try_uninstall(&lib.push(*file)) { return }, None => () } } for os::list_dir(bin).each |file| { - match str::find_str(file, target) { - Some(_) => if !try_uninstall(&lib.push(file)) { return }, + match str::find_str(*file, target) { + Some(_) => if !try_uninstall(&lib.push(*file)) { return }, None => () } } diff --git a/src/cargo/pgp.rs b/src/cargo/pgp.rs index 17f3a2060aa..17cb8dc6487 100644 --- a/src/cargo/pgp.rs +++ b/src/cargo/pgp.rs @@ -95,7 +95,7 @@ fn verify(root: &Path, data: &Path, sig: &Path, keyfp: ~str) -> bool { ~"--verify", sig.to_str(), data.to_str()]); let res = ~"Primary key fingerprint: " + keyfp; - for str::split_char(p.err, '\n').each |line| { + for str::split_char_each(p.err, '\n') |line| { if line == res { return true; } } return false; diff --git a/src/compiletest/compiletest.rs b/src/compiletest/compiletest.rs index 02c933b6fa3..869f3e93395 100644 --- a/src/compiletest/compiletest.rs +++ b/src/compiletest/compiletest.rs @@ -138,7 +138,7 @@ fn make_tests(config: config) -> ~[test::TestDesc] { config.src_base.to_str()); let mut tests = ~[]; for os::list_dir_path(&config.src_base).each |file| { - let file = copy file; + let file = copy *file; debug!("inspecting file %s", file.to_str()); if is_test(config, file) { vec::push(tests, make_test(config, file)) @@ -160,11 +160,11 @@ fn is_test(config: config, testfile: &Path) -> bool { let mut valid = false; for valid_extensions.each |ext| { - if str::ends_with(name, ext) { valid = true; } + if str::ends_with(name, *ext) { valid = true; } } for invalid_prefixes.each |pre| { - if str::starts_with(name, pre) { valid = false; } + if str::starts_with(name, *pre) { valid = false; } } return valid; diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs index cae9801b674..a6437598acf 100644 --- a/src/compiletest/runtest.rs +++ b/src/compiletest/runtest.rs @@ -219,7 +219,7 @@ fn check_error_patterns(props: test_props, let mut next_err_pat = props.error_patterns[next_err_idx]; let mut done = false; for str::split_char(procres.stderr, '\n').each |line| { - if str::contains(line, next_err_pat) { + if str::contains(*line, next_err_pat) { debug!("found error pattern %s", next_err_pat); next_err_idx += 1u; if next_err_idx == vec::len(props.error_patterns) { @@ -240,7 +240,7 @@ fn check_error_patterns(props: test_props, missing_patterns[0]), procres); } else { for missing_patterns.each |pattern| { - error(fmt!("error pattern '%s' not found!", pattern)); + error(fmt!("error pattern '%s' not found!", *pattern)); } fatal_procres(~"multiple error patterns not found", procres); } @@ -273,10 +273,10 @@ fn check_expected_errors(expected_errors: ~[errors::expected_error], for vec::eachi(expected_errors) |i, ee| { if !found_flags[i] { debug!("prefix=%s ee.kind=%s ee.msg=%s line=%s", - prefixes[i], ee.kind, ee.msg, line); - if (str::starts_with(line, prefixes[i]) && - str::contains(line, ee.kind) && - str::contains(line, ee.msg)) { + prefixes[i], ee.kind, ee.msg, *line); + if (str::starts_with(*line, prefixes[i]) && + str::contains(*line, ee.kind) && + str::contains(*line, ee.msg)) { found_flags[i] = true; was_expected = true; break; @@ -285,13 +285,13 @@ fn 
check_expected_errors(expected_errors: ~[errors::expected_error], } // ignore this msg which gets printed at the end - if str::contains(line, ~"aborting due to") { + if str::contains(*line, ~"aborting due to") { was_expected = true; } - if !was_expected && is_compiler_error_or_warning(line) { + if !was_expected && is_compiler_error_or_warning(*line) { fatal_procres(fmt!("unexpected compiler error or warning: '%s'", - line), + *line), procres); } } diff --git a/src/fuzzer/fuzzer.rs b/src/fuzzer/fuzzer.rs index 11003129e82..700acb5434d 100644 --- a/src/fuzzer/fuzzer.rs +++ b/src/fuzzer/fuzzer.rs @@ -33,7 +33,7 @@ fn find_rust_files(files: &mut ~[Path], path: &Path) { && !contains(path.to_str(), ~"compile-fail") && !contains(path.to_str(), ~"build") { for os::list_dir_path(path).each |p| { - find_rust_files(files, p); + find_rust_files(files, *p); } } } @@ -463,7 +463,7 @@ fn content_is_dangerous_to_run(code: ~str) -> bool { ~"unsafe", ~"log"]; // python --> rust pipe deadlock? - for dangerous_patterns.each |p| { if contains(code, p) { return true; } } + for dangerous_patterns.each |p| { if contains(code, *p) { return true; } } return false; } @@ -471,7 +471,7 @@ fn content_is_dangerous_to_compile(code: ~str) -> bool { let dangerous_patterns = ~[~"xfail-test"]; - for dangerous_patterns.each |p| { if contains(code, p) { return true; } } + for dangerous_patterns.each |p| { if contains(code, *p) { return true; } } return false; } @@ -487,7 +487,7 @@ fn content_might_not_converge(code: ~str) -> bool { ~"\n\n\n\n\n" // https://github.com/mozilla/rust/issues/850 ]; - for confusing_patterns.each |p| { if contains(code, p) { return true; } } + for confusing_patterns.each |p| { if contains(code, *p) { return true; } } return false; } @@ -502,7 +502,7 @@ fn file_might_not_converge(filename: &Path) -> bool { for confusing_files.each |f| { - if contains(filename.to_str(), f) { + if contains(filename.to_str(), *f) { return true; } } @@ -540,8 +540,8 @@ fn check_roundtrip_convergence(code: @~str, maxIters: uint) { fn check_convergence(files: &[Path]) { error!("pp convergence tests: %u files", vec::len(files)); for files.each |file| { - if !file_might_not_converge(&file) { - let s = @result::get(io::read_whole_file_str(&file)); + if !file_might_not_converge(file) { + let s = @result::get(io::read_whole_file_str(file)); if !content_might_not_converge(*s) { error!("pp converge: %s", file.to_str()); // Change from 7u to 2u once @@ -555,13 +555,13 @@ fn check_convergence(files: &[Path]) { fn check_variants(files: &[Path], cx: context) { for files.each |file| { if cx.mode == tm_converge && - file_might_not_converge(&file) { + file_might_not_converge(file) { error!("Skipping convergence test based on\ file_might_not_converge"); loop; } - let s = @result::get(io::read_whole_file_str(&file)); + let s = @result::get(io::read_whole_file_str(file)); if contains(*s, ~"#") { loop; // Macros are confusing } @@ -572,11 +572,13 @@ fn check_variants(files: &[Path], cx: context) { loop; } - log(error, ~"check_variants: " + file.to_str()); + let file_str = file.to_str(); + + log(error, ~"check_variants: " + file_str); let sess = parse::new_parse_sess(option::None); let crate = parse::parse_crate_from_source_str( - file.to_str(), + file_str, s, ~[], sess); io::with_str_reader(*s, |rdr| { error!("%s", @@ -586,12 +588,12 @@ fn check_variants(files: &[Path], cx: context) { syntax::parse::token::mk_fake_ident_interner(), sess.span_diagnostic, crate, - file.to_str(), + file_str, rdr, a, pprust::no_ann(), - false) )) + false))) }); 
- check_variants_of_ast(*crate, sess.cm, &file, cx); + check_variants_of_ast(*crate, sess.cm, file, cx); } } diff --git a/src/libcore/dlist.rs b/src/libcore/dlist.rs index d0474673f83..7f4a42db641 100644 --- a/src/libcore/dlist.rs +++ b/src/libcore/dlist.rs @@ -678,7 +678,7 @@ mod tests { let mut x = 0; for l.each |i| { x += 1; - if (i == 3) { break; } + if (*i == 3) { break; } } assert x == 3; } diff --git a/src/libcore/dvec.rs b/src/libcore/dvec.rs index 482a326b74f..8f196b9e078 100644 --- a/src/libcore/dvec.rs +++ b/src/libcore/dvec.rs @@ -272,7 +272,7 @@ impl<A: Copy> DVec<A> { } }; - for ts.each |t| { vec::push(v, t) }; + for ts.each |t| { vec::push(v, *t) }; v } } diff --git a/src/libcore/iter-trait.rs b/src/libcore/iter-trait.rs index 27b525cba90..2bc79d20bd3 100644 --- a/src/libcore/iter-trait.rs +++ b/src/libcore/iter-trait.rs @@ -7,7 +7,7 @@ use inst::{IMPL_T, EACH, SIZE_HINT}; export extensions; impl<A> IMPL_T<A>: iter::BaseIter<A> { - pure fn each(blk: fn(A) -> bool) { EACH(self, blk) } + pure fn each(blk: fn(v: &A) -> bool) { EACH(self, blk) } pure fn size_hint() -> Option<uint> { SIZE_HINT(self) } } diff --git a/src/libcore/iter-trait/dlist.rs b/src/libcore/iter-trait/dlist.rs index ae6265409ca..fde6cf22a5e 100644 --- a/src/libcore/iter-trait/dlist.rs +++ b/src/libcore/iter-trait/dlist.rs @@ -8,12 +8,12 @@ type IMPL_T<A> = dlist::DList<A>; * e.g. breadth-first search with in-place enqueues), but removing the current * node is forbidden. */ -pure fn EACH<A>(self: IMPL_T<A>, f: fn(A) -> bool) { +pure fn EACH<A>(self: IMPL_T<A>, f: fn(v: &A) -> bool) { let mut link = self.peek_n(); while option::is_some(link) { let nobe = option::get(link); assert nobe.linked; - if !f(nobe.data) { break; } + if !f(&nobe.data) { break; } // Check (weakly) that the user didn't do a remove. if self.size == 0 { fail ~"The dlist became empty during iteration??" diff --git a/src/libcore/iter-trait/dvec.rs b/src/libcore/iter-trait/dvec.rs index 7284d02d3ac..0f51df7b545 100644 --- a/src/libcore/iter-trait/dvec.rs +++ b/src/libcore/iter-trait/dvec.rs @@ -6,7 +6,7 @@ type IMPL_T<A> = dvec::DVec<A>; * * Attempts to access this dvec during iteration will fail. 
*/ -pure fn EACH<A>(self: IMPL_T<A>, f: fn(A) -> bool) { +pure fn EACH<A>(self: IMPL_T<A>, f: fn(v: &A) -> bool) { unsafe { do self.swap |v| { v.each(f); diff --git a/src/libcore/iter-trait/option.rs b/src/libcore/iter-trait/option.rs index 206efa85064..e1ffec0a7d7 100644 --- a/src/libcore/iter-trait/option.rs +++ b/src/libcore/iter-trait/option.rs @@ -1,10 +1,10 @@ #[allow(non_camel_case_types)] type IMPL_T<A> = Option<A>; -pure fn EACH<A>(self: IMPL_T<A>, f: fn(A) -> bool) { +pure fn EACH<A>(self: IMPL_T<A>, f: fn(v: &A) -> bool) { match self { None => (), - Some(a) => { f(a); } + Some(ref a) => { f(a); } } } diff --git a/src/libcore/iter.rs b/src/libcore/iter.rs index 7bc356ce812..aab6bc38d93 100644 --- a/src/libcore/iter.rs +++ b/src/libcore/iter.rs @@ -10,7 +10,7 @@ use cmp::{Eq, Ord}; type InitOp<T> = fn(uint) -> T; trait BaseIter<A> { - pure fn each(blk: fn(A) -> bool); + pure fn each(blk: fn(v: &A) -> bool); pure fn size_hint() -> Option<uint>; } @@ -69,21 +69,21 @@ trait Buildable<A> { pure fn eachi<A,IA:BaseIter<A>>(self: IA, blk: fn(uint, A) -> bool) { let mut i = 0u; for self.each |a| { - if !blk(i, a) { break; } + if !blk(i, *a) { break; } i += 1u; } } pure fn all<A,IA:BaseIter<A>>(self: IA, blk: fn(A) -> bool) -> bool { for self.each |a| { - if !blk(a) { return false; } + if !blk(*a) { return false; } } return true; } pure fn any<A,IA:BaseIter<A>>(self: IA, blk: fn(A) -> bool) -> bool { for self.each |a| { - if blk(a) { return true; } + if blk(*a) { return true; } } return false; } @@ -92,7 +92,7 @@ pure fn filter_to_vec<A:Copy,IA:BaseIter<A>>(self: IA, prd: fn(A) -> bool) -> ~[A] { do vec::build_sized_opt(self.size_hint()) |push| { for self.each |a| { - if prd(a) { push(a); } + if prd(*a) { push(*a); } } } } @@ -101,7 +101,7 @@ pure fn map_to_vec<A:Copy,B,IA:BaseIter<A>>(self: IA, op: fn(A) -> B) -> ~[B] { do vec::build_sized_opt(self.size_hint()) |push| { for self.each |a| { - push(op(a)); + push(op(*a)); } } } @@ -111,8 +111,8 @@ pure fn flat_map_to_vec<A:Copy,B:Copy,IA:BaseIter<A>,IB:BaseIter<B>>( do vec::build |push| { for self.each |a| { - for op(a).each |b| { - push(b); + for op(*a).each |b| { + push(*b); } } } @@ -121,7 +121,7 @@ pure fn flat_map_to_vec<A:Copy,B:Copy,IA:BaseIter<A>,IB:BaseIter<B>>( pure fn foldl<A,B,IA:BaseIter<A>>(self: IA, +b0: B, blk: fn(B, A) -> B) -> B { let mut b <- b0; for self.each |a| { - b = blk(b, a); + b = blk(b, *a); } move b } @@ -132,7 +132,7 @@ pure fn to_vec<A:Copy,IA:BaseIter<A>>(self: IA) -> ~[A] { pure fn contains<A:Eq,IA:BaseIter<A>>(self: IA, x: A) -> bool { for self.each |a| { - if a == x { return true; } + if *a == x { return true; } } return false; } @@ -152,7 +152,7 @@ pure fn position<A,IA:BaseIter<A>>(self: IA, f: fn(A) -> bool) { let mut i = 0; for self.each |a| { - if f(a) { return Some(i); } + if f(*a) { return Some(i); } i += 1; } return None; @@ -205,7 +205,7 @@ pure fn max<A:Copy Ord,IA:BaseIter<A>>(self: IA) -> A { pure fn find<A: Copy,IA:BaseIter<A>>(self: IA, p: fn(A) -> bool) -> Option<A> { for self.each |i| { - if p(i) { return Some(i) } + if p(*i) { return Some(*i) } } return None; } @@ -254,7 +254,7 @@ pure fn build_sized_opt<A,B: Buildable<A>>( fn map<T,IT: BaseIter<T>,U,BU: Buildable<U>>(v: IT, f: fn(T) -> U) -> BU { do build_sized_opt(v.size_hint()) |push| { for v.each() |elem| { - push(f(elem)); + push(f(*elem)); } } } @@ -292,8 +292,8 @@ pure fn append<T: Copy,IT: BaseIter<T>,BT: Buildable<T>>( let size_opt = lhs.size_hint().chain( |sz1| rhs.size_hint().map(|sz2| sz1+sz2)); do build_sized_opt(size_opt) 
|push| { - for lhs.each |x| { push(x); } - for rhs.each |x| { push(x); } + for lhs.each |x| { push(*x); } + for rhs.each |x| { push(*x); } } } @@ -303,6 +303,6 @@ pure fn append<T: Copy,IT: BaseIter<T>,BT: Buildable<T>>( pure fn copy_seq<T: Copy,IT: BaseIter<T>,BT: Buildable<T>>( v: IT) -> BT { do build_sized_opt(v.size_hint()) |push| { - for v.each |x| { push(x); } + for v.each |x| { push(*x); } } } diff --git a/src/libcore/os.rs b/src/libcore/os.rs index 6c6186459ac..21a6a06572a 100644 --- a/src/libcore/os.rs +++ b/src/libcore/os.rs @@ -529,7 +529,7 @@ fn walk_dir(p: &Path, f: fn((&Path)) -> bool) { fn walk_dir_(p: &Path, f: fn((&Path)) -> bool) -> bool { let mut keepgoing = true; do list_dir(p).each |q| { - let path = &p.push(q); + let path = &p.push(*q); if !f(path) { keepgoing = false; false diff --git a/src/libcore/path.rs b/src/libcore/path.rs index 80dfab3fbef..88d2526f310 100644 --- a/src/libcore/path.rs +++ b/src/libcore/path.rs @@ -203,7 +203,9 @@ impl PosixPath : GenericPath { pure fn push_many(cs: &[~str]) -> PosixPath { let mut v = copy self.components; for cs.each |e| { - let mut ss = str::split_nonempty(e, |c| windows::is_sep(c as u8)); + let mut ss = str::split_nonempty( + *e, + |c| windows::is_sep(c as u8)); unsafe { vec::push_all_move(v, move ss); } } PosixPath { components: move v, ..self } @@ -395,7 +397,9 @@ impl WindowsPath : GenericPath { pure fn push_many(cs: &[~str]) -> WindowsPath { let mut v = copy self.components; for cs.each |e| { - let mut ss = str::split_nonempty(e, |c| windows::is_sep(c as u8)); + let mut ss = str::split_nonempty( + *e, + |c| windows::is_sep(c as u8)); unsafe { vec::push_all_move(v, move ss); } } return WindowsPath { components: move v, ..self } @@ -430,13 +434,13 @@ pure fn normalize(components: &[~str]) -> ~[~str] { unsafe { for components.each |c| { unsafe { - if c == ~"." && components.len() > 1 { loop; } - if c == ~"" { loop; } - if c == ~".." && cs.len() != 0 { + if *c == ~"." && components.len() > 1 { loop; } + if *c == ~"" { loop; } + if *c == ~".." && cs.len() != 0 { vec::pop(cs); loop; } - vec::push(cs, copy c); + vec::push(cs, copy *c); } } } diff --git a/src/libcore/vec.rs b/src/libcore/vec.rs index c81baf52476..2fac80aa2e7 100644 --- a/src/libcore/vec.rs +++ b/src/libcore/vec.rs @@ -1874,9 +1874,9 @@ mod bytes { // required in the slice. 
impl<A> &[A]: iter::BaseIter<A> { - pure fn each(blk: fn(A) -> bool) { + pure fn each(blk: fn(v: &A) -> bool) { for each(self) |e| { - if (!blk(*e)) { + if (!blk(e)) { return; } } diff --git a/src/libstd/json.rs b/src/libstd/json.rs index b10c40f634a..b66d69d442f 100644 --- a/src/libstd/json.rs +++ b/src/libstd/json.rs @@ -61,7 +61,7 @@ fn to_writer(wr: io::Writer, j: Json) { wr.write_str(~", "); } first = false; - to_writer(wr, item); + to_writer(wr, *item); }; wr.write_char(']'); } @@ -122,7 +122,7 @@ fn to_writer_pretty(wr: io::Writer, j: Json, indent: uint) { wr.write_str(spaces(inner_indent)); } first = false; - to_writer_pretty(wr, item, inner_indent); + to_writer_pretty(wr, *item, inner_indent); }; // ] @@ -156,7 +156,7 @@ fn to_writer_pretty(wr: io::Writer, j: Json, indent: uint) { // k: v } let mut first = true; for sorted_pairs.each |kv| { - let (key, value) = kv; + let (key, value) = *kv; if !first { wr.write_str(~",\n"); wr.write_str(spaces(inner_indent)); diff --git a/src/libstd/net_url.rs b/src/libstd/net_url.rs index edd562fa1a7..493e0cc762c 100644 --- a/src/libstd/net_url.rs +++ b/src/libstd/net_url.rs @@ -199,7 +199,7 @@ fn encode_form_urlencoded(m: HashMap<~str, @DVec<@~str>>) -> ~str { first = false; } - out += #fmt("%s=%s", key, encode_plus(*value)); + out += #fmt("%s=%s", key, encode_plus(**value)); } } @@ -328,7 +328,7 @@ fn query_from_str(rawquery: &str) -> Query { let mut query: Query = ~[]; if str::len(rawquery) != 0 { for str::split_char(rawquery, '&').each |p| { - let (k, v) = split_char_first(p, '='); + let (k, v) = split_char_first(*p, '='); vec::push(query, (decode_component(k), decode_component(v))); }; } @@ -338,7 +338,7 @@ fn query_from_str(rawquery: &str) -> Query { fn query_to_str(+query: Query) -> ~str { let mut strvec = ~[]; for query.each |kv| { - let (k, v) = copy kv; + let (k, v) = copy *kv; strvec += ~[#fmt("%s=%s", encode_component(k), encode_component(v))]; }; return str::connect(strvec, ~"&"); diff --git a/src/libstd/smallintmap.rs b/src/libstd/smallintmap.rs index 5366774db37..ffa5aeef31e 100644 --- a/src/libstd/smallintmap.rs +++ b/src/libstd/smallintmap.rs @@ -70,7 +70,7 @@ impl<V: Copy> SmallIntMap<V>: map::Map<uint, V> { pure fn size() -> uint { let mut sz = 0u; for self.v.each |item| { - match item { + match *item { Some(_) => sz += 1u, _ => () } diff --git a/src/libstd/timer.rs b/src/libstd/timer.rs index 9b6a2b85852..eca9a1fc8a8 100644 --- a/src/libstd/timer.rs +++ b/src/libstd/timer.rs @@ -179,7 +179,7 @@ mod test { for iter::repeat(repeat) { for spec.each |spec| { - let (times, maxms) = spec; + let (times, maxms) = *spec; do task::spawn { use rand::*; let rng = Rng(); diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 7917ff4d886..d813c7e4b08 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -151,7 +151,7 @@ fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, for decl.inputs.each |a| { cx.map.insert(a.id, node_arg(/* FIXME (#2543) */ - copy a, cx.local_id)); + copy *a, cx.local_id)); cx.local_id += 1u; } match fk { @@ -220,14 +220,14 @@ fn map_item(i: @item, cx: ctx, v: vt) { item_impl(_, _, _, ms) => { let impl_did = ast_util::local_def(i.id); for ms.each |m| { - map_method(impl_did, extend(cx, i.ident), m, + map_method(impl_did, extend(cx, i.ident), *m, cx); } } item_enum(enum_definition, _) => { for enum_definition.variants.each |v| { cx.map.insert(v.node.id, node_variant( - /* FIXME (#2543) */ copy v, i, + /* FIXME (#2543) */ copy *v, i, extend(cx, i.ident))); } } @@ -238,7 +238,7 @@ fn 
map_item(i: @item, cx: ctx, v: vt) { }; for nm.items.each |nitem| { cx.map.insert(nitem.id, - node_foreign_item(nitem, abi, + node_foreign_item(*nitem, abi, /* FIXME (#2543) */ if nm.sort == ast::named { extend(cx, i.ident) @@ -264,9 +264,9 @@ fn map_item(i: @item, cx: ctx, v: vt) { cx.map.insert(p.impl_id, node_item(i, item_path)); } for methods.each |tm| { - let id = ast_util::trait_method_to_ty_method(tm).id; + let id = ast_util::trait_method_to_ty_method(*tm).id; let d_id = ast_util::local_def(i.id); - cx.map.insert(id, node_trait_method(@tm, d_id, item_path)); + cx.map.insert(id, node_trait_method(@*tm, d_id, item_path)); } } _ => () @@ -310,7 +310,7 @@ fn map_view_item(vi: @view_item, cx: ctx, _v: vt) { (id, path_to_ident(pth)) } }; - cx.map.insert(id, node_export(vp, extend(cx, name))); + cx.map.insert(id, node_export(*vp, extend(cx, name))); }, _ => () } diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index ef8a4b602d1..41be872f060 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -312,7 +312,7 @@ fn split_trait_methods(trait_methods: ~[trait_method]) -> (~[ty_method], ~[@method]) { let mut reqd = ~[], provd = ~[]; for trait_methods.each |trt_method| { - match trt_method { + match *trt_method { required(tm) => vec::push(reqd, tm), provided(m) => vec::push(provd, m) } @@ -575,15 +575,23 @@ pure fn is_item_impl(item: @ast::item) -> bool { fn walk_pat(pat: @pat, it: fn(@pat)) { it(pat); match pat.node { - pat_ident(_, _, Some(p)) => walk_pat(p, it), - pat_rec(fields, _) | pat_struct(_, fields, _) => - for fields.each |f| { walk_pat(f.pat, it) }, - pat_enum(_, Some(s)) | pat_tup(s) => for s.each |p| { - walk_pat(p, it) - }, - pat_box(s) | pat_uniq(s) | pat_region(s) => walk_pat(s, it), - pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, _, _) - | pat_enum(_, _) => () + pat_ident(_, _, Some(p)) => walk_pat(p, it), + pat_rec(fields, _) | pat_struct(_, fields, _) => { + for fields.each |f| { + walk_pat(f.pat, it) + } + } + pat_enum(_, Some(s)) | pat_tup(s) => { + for s.each |p| { + walk_pat(*p, it) + } + } + pat_box(s) | pat_uniq(s) | pat_region(s) => { + walk_pat(s, it) + } + pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, _, _) | + pat_enum(_, _) => { + } } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 17e34db0426..fdcd1087935 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -91,7 +91,7 @@ fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value } // Get the meta_items from inside a vector of attributes fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] { let mut mitems = ~[]; - for attrs.each |a| { vec::push(mitems, attr_meta(a)); } + for attrs.each |a| { vec::push(mitems, attr_meta(*a)); } return mitems; } @@ -189,7 +189,7 @@ fn find_meta_items_by_name(metas: ~[@ast::meta_item], name: ~str) -> */ fn contains(haystack: ~[@ast::meta_item], needle: @ast::meta_item) -> bool { for haystack.each |item| { - if eq(item, needle) { return true; } + if eq(*item, needle) { return true; } } return false; } @@ -370,7 +370,7 @@ fn require_unique_names(diagnostic: span_handler, metas: ~[@ast::meta_item]) { let map = map::HashMap(); for metas.each |meta| { - let name = get_meta_item_name(meta); + let name = get_meta_item_name(*meta); // FIXME: How do I silence the warnings? 
--pcw (#2619) if map.contains_key(name) { diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 2e3c556084f..ab34ed8368c 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -242,7 +242,7 @@ fn get_snippet(cm: codemap::codemap, fidx: uint, lo: uint, hi: uint) -> ~str } fn get_filemap(cm: codemap, filename: ~str) -> filemap { - for cm.files.each |fm| { if fm.name == filename { return fm; } } + for cm.files.each |fm| { if fm.name == filename { return *fm; } } //XXjdm the following triggers a mismatched type bug // (or expected function, found _|_) fail; // ("asking for " + filename + " which we don't know about"); diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 66553162b25..217b8c9cf4b 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -219,8 +219,8 @@ fn highlight_lines(cm: codemap::codemap, sp: span, } // Print the offending lines for display_lines.each |line| { - io::stderr().write_str(fmt!("%s:%u ", fm.name, line + 1u)); - let s = codemap::get_line(fm, line as int) + ~"\n"; + io::stderr().write_str(fmt!("%s:%u ", fm.name, *line + 1u)); + let s = codemap::get_line(fm, *line as int) + ~"\n"; io::stderr().write_str(s); } if elided { diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 45e6bf43f4b..94b22d68cea 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -6,7 +6,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, let mut res_str = ~""; for args.each |e| { res_str += *cx.parse_sess().interner.get( - expr_to_ident(cx, e, ~"expected an ident")); + expr_to_ident(cx, *e, ~"expected an ident")); } let res = cx.parse_sess().interner.intern(@res_str); diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index 4c5435d1123..3ea0493239f 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -53,7 +53,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, fn make_flags(cx: ext_ctxt, sp: span, flags: ~[Flag]) -> @ast::expr { let mut tmp_expr = make_rt_path_expr(cx, sp, @~"flag_none"); for flags.each |f| { - let fstr = match f { + let fstr = match *f { FlagLeftJustify => ~"flag_left_justify", FlagLeftZeroPad => ~"flag_left_zero_pad", FlagSpaceForSign => ~"flag_space_for_sign", @@ -139,7 +139,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, _ => cx.span_unimpl(sp, unsupported) } for cnv.flags.each |f| { - match f { + match *f { FlagLeftJustify => (), FlagSignAlways => { if !is_signed_type(cnv) { @@ -196,7 +196,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, _ => debug!("param: none") } for c.flags.each |f| { - match f { + match *f { FlagLeftJustify => debug!("flag: left justify"), FlagLeftZeroPad => debug!("flag: left zero pad"), FlagSpaceForSign => debug!("flag: left space pad"), @@ -243,7 +243,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, let mut piece_exprs = ~[]; let nargs = args.len(); for pieces.each |pc| { - match pc { + match *pc { PieceString(s) => { vec::push(piece_exprs, mk_uniq_str(cx, fmt_sp, s)) } diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 600c30f1f8b..e2b08d089a7 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -202,7 +202,7 @@ impl state: to_type_decls { let mut items_msg = ~[]; for self.messages.each |m| { - let message(name, span, tys, this, next) = m; + let message(name, span, tys, this, next) = *m; let tys = match next { Some({state: next, tys: next_tys}) => { @@ -366,7 +366,7 @@ impl 
protocol: gen_init { for (copy self.states).each |s| { for s.ty_params.each |tp| { match params.find(|tpp| tp.ident == tpp.ident) { - None => vec::push(params, tp), + None => vec::push(params, *tp), _ => () } } @@ -382,7 +382,7 @@ impl protocol: gen_init { let fields = do (copy self.states).map_to_vec |s| { for s.ty_params.each |tp| { match params.find(|tpp| tp.ident == tpp.ident) { - None => vec::push(params, tp), + None => vec::push(params, *tp), _ => () } } diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index 373f16daeb0..fec8339cf1a 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -102,7 +102,7 @@ impl state { /// from this state. fn reachable(f: fn(state) -> bool) { for self.messages.each |m| { - match m { + match *m { message(_, _, _, _, Some({state: id, _})) => { let state = self.proto.get_state(id); if !f(state) { break } diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index 8c7ef65d227..c9385b0c35c 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -92,7 +92,7 @@ fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) -> Option<~[U]> { let mut res = ~[]; for v.each |elem| { - match f(elem) { + match f(*elem) { None => return None, Some(fv) => vec::push(res, fv) } @@ -156,7 +156,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> Option<bindings> { let res = HashMap(); //need to do this first, to check vec lengths. for b.literal_ast_matchers.each |sel| { - match sel(match_expr(e)) { None => return None, _ => () } + match (*sel)(match_expr(e)) { None => return None, _ => () } } let mut never_mind: bool = false; for b.real_binders.each |key, val| { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index c0a8848139f..74c36dcf1b7 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -174,7 +174,7 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match]) match m { {node: match_tok(_), span: _} => (), {node: match_seq(more_ms, _, _, _, _), span: _} => { - for more_ms.each() |next_m| { n_rec(p_s, next_m, res, ret_val) }; + for more_ms.each() |next_m| { n_rec(p_s, *next_m, res, ret_val) }; } {node: match_nonterminal(bind_name, _, idx), span: sp} => { if ret_val.contains_key(bind_name) { @@ -186,7 +186,7 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match]) } } let ret_val = HashMap::<uint,@named_match>(); - for ms.each() |m| { n_rec(p_s, m, res, ret_val) } + for ms.each() |m| { n_rec(p_s, *m, res, ret_val) } return ret_val; } diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs index 526b5101d34..c5e64654d7b 100644 --- a/src/libsyntax/parse/eval.rs +++ b/src/libsyntax/parse/eval.rs @@ -13,7 +13,7 @@ fn eval_crate_directives(cx: ctx, &view_items: ~[@ast::view_item], &items: ~[@ast::item]) { for cdirs.each |sub_cdir| { - eval_crate_directive(cx, sub_cdir, prefix, view_items, items); + eval_crate_directive(cx, *sub_cdir, prefix, view_items, items); } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 0902567e864..a1c1208d21c 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2244,7 +2244,7 @@ impl parser { IMPORTS_AND_ITEMS_ALLOWED); for items.each |item| { - let decl = @spanned(item.span.lo, item.span.hi, decl_item(item)); + let decl = @spanned(item.span.lo, item.span.hi, decl_item(*item)); push(stmts, @spanned(item.span.lo, item.span.hi, stmt_decl(decl, 
self.get_id()))); } @@ -2699,7 +2699,7 @@ impl parser { } members(mms) => { for mms.each |mm| { - match mm { + match *mm { @field_member(struct_field) => vec::push(fields, struct_field), @method_member(the_method_member) => @@ -3090,7 +3090,7 @@ impl parser { } members(mms) => { for mms.each |mm| { - match mm { + match *mm { @field_member(struct_field) => vec::push(fields, struct_field), @method_member(the_method_member) => @@ -3163,7 +3163,7 @@ impl parser { seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_ty(false)); for arg_tys.each |ty| { - vec::push(args, {ty: ty, id: self.get_id()}); + vec::push(args, {ty: *ty, id: self.get_id()}); } kind = tuple_variant_kind(args); } else if self.eat(token::EQ) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 2bd5d155063..570915e657f 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -389,7 +389,7 @@ fn temporary_keyword_table() -> HashMap<~str, ()> { ~"self", ~"static", ]; for keys.each |word| { - words.insert(word, ()); + words.insert(*word, ()); } words } @@ -415,7 +415,7 @@ fn strict_keyword_table() -> HashMap<~str, ()> { ~"while" ]; for keys.each |word| { - words.insert(word, ()); + words.insert(*word, ()); } words } @@ -426,7 +426,7 @@ fn reserved_keyword_table() -> HashMap<~str, ()> { ~"be" ]; for keys.each |word| { - words.insert(word, ()); + words.insert(*word, ()); } words } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index a87e3c4b52b..f41cdae7e03 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -281,7 +281,7 @@ fn commasep<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN)) { let mut first = true; for elts.each |elt| { if first { first = false; } else { word_space(s, ~","); } - op(s, elt); + op(s, *elt); } end(s); } @@ -293,12 +293,12 @@ fn commasep_cmnt<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN), let len = vec::len::<IN>(elts); let mut i = 0u; for elts.each |elt| { - maybe_print_comment(s, get_span(elt).hi); - op(s, elt); + maybe_print_comment(s, get_span(*elt).hi); + op(s, *elt); i += 1u; if i < len { word(s.s, ~","); - maybe_print_trailing_comment(s, get_span(elt), + maybe_print_trailing_comment(s, get_span(*elt), Some(get_span(elts[i]).hi)); space_if_not_bol(s); } @@ -314,18 +314,18 @@ fn commasep_exprs(s: ps, b: breaks, exprs: ~[@ast::expr]) { fn print_mod(s: ps, _mod: ast::_mod, attrs: ~[ast::attribute]) { print_inner_attributes(s, attrs); for _mod.view_items.each |vitem| { - print_view_item(s, vitem); + print_view_item(s, *vitem); } - for _mod.items.each |item| { print_item(s, item); } + for _mod.items.each |item| { print_item(s, *item); } } fn print_foreign_mod(s: ps, nmod: ast::foreign_mod, attrs: ~[ast::attribute]) { print_inner_attributes(s, attrs); for nmod.view_items.each |vitem| { - print_view_item(s, vitem); + print_view_item(s, *vitem); } - for nmod.items.each |item| { print_foreign_item(s, item); } + for nmod.items.each |item| { print_foreign_item(s, *item); } } fn print_region(s: ps, region: @ast::region, sep: ~str) { @@ -525,7 +525,7 @@ fn print_item(s: ps, &&item: @ast::item) { bopen(s); for methods.each |meth| { - print_method(s, meth); + print_method(s, *meth); } bclose(s, item.span); } @@ -540,7 +540,9 @@ fn print_item(s: ps, &&item: @ast::item) { } word(s.s, ~" "); bopen(s); - for methods.each |meth| { print_trait_method(s, meth); } + for methods.each |meth| { + print_trait_method(s, *meth); + } bclose(s, item.span); } ast::item_mac({node: ast::mac_invoc_tt(pth, tts), _}) => { @@ 
-549,7 +551,9 @@ fn print_item(s: ps, &&item: @ast::item) { print_ident(s, item.ident); cbox(s, indent_unit); popen(s); - for tts.each |tt| { print_tt(s, tt); } + for tts.each |tt| { + print_tt(s, *tt); + } pclose(s); end(s); } @@ -602,7 +606,7 @@ fn print_variants(s: ps, variants: ~[ast::variant], span: ast::span) { maybe_print_comment(s, v.span.lo); print_outer_attributes(s, v.node.attrs); ibox(s, indent_unit); - print_variant(s, v); + print_variant(s, *v); word(s.s, ~","); end(s); maybe_print_trailing_comment(s, v.span, None::<uint>); @@ -661,7 +665,7 @@ fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param], } } for struct_def.methods.each |method| { - print_method(s, method); + print_method(s, *method); } bclose(s, span); } @@ -675,7 +679,7 @@ fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param], /// expression arguments as expressions). It can be done! I think. fn print_tt(s: ps, tt: ast::token_tree) { match tt { - ast::tt_delim(tts) => for tts.each() |tt_elt| { print_tt(s, tt_elt); }, + ast::tt_delim(tts) => for tts.each() |tt_elt| { print_tt(s, *tt_elt); }, ast::tt_tok(_, tk) => { match tk { parse::token::IDENT(*) => { // don't let idents run together @@ -688,7 +692,7 @@ fn print_tt(s: ps, tt: ast::token_tree) { } ast::tt_seq(_, tts, sep, zerok) => { word(s.s, ~"$("); - for tts.each() |tt_elt| { print_tt(s, tt_elt); } + for tts.each() |tt_elt| { print_tt(s, *tt_elt); } word(s.s, ~")"); match sep { Some(tk) => word(s.s, parse::token::to_str(s.intr, tk)), @@ -767,7 +771,7 @@ fn print_outer_attributes(s: ps, attrs: ~[ast::attribute]) { let mut count = 0; for attrs.each |attr| { match attr.node.style { - ast::attr_outer => { print_attribute(s, attr); count += 1; } + ast::attr_outer => { print_attribute(s, *attr); count += 1; } _ => {/* fallthrough */ } } } @@ -779,7 +783,7 @@ fn print_inner_attributes(s: ps, attrs: ~[ast::attribute]) { for attrs.each |attr| { match attr.node.style { ast::attr_inner => { - print_attribute(s, attr); + print_attribute(s, *attr); if !attr.node.is_sugared_doc { word(s.s, ~";"); } @@ -870,9 +874,9 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type, print_inner_attributes(s, attrs); - for blk.node.view_items.each |vi| { print_view_item(s, vi); } + for blk.node.view_items.each |vi| { print_view_item(s, *vi); } for blk.node.stmts.each |st| { - print_stmt(s, *st); + print_stmt(s, **st); } match blk.node.expr { Some(expr) => { @@ -956,7 +960,7 @@ fn print_mac(s: ps, m: ast::mac) { print_path(s, pth, false); word(s.s, ~"!"); popen(s); - for tts.each() |tt| { print_tt(s, tt); } + for tts.each() |tt| { print_tt(s, *tt); } pclose(s); } ast::mac_ellipsis => word(s.s, ~"..."), @@ -1167,7 +1171,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) { if first { first = false; } else { space(s.s); word_space(s, ~"|"); } - print_pat(s, p); + print_pat(s, *p); } space(s.s); match arm.guard { @@ -1445,7 +1449,7 @@ fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) { let mut first = true; for path.idents.each |id| { if first { first = false; } else { word(s.s, ~"::"); } - print_ident(s, id); + print_ident(s, *id); } if path.rp.is_some() || !path.types.is_empty() { if colons_before_params { word(s.s, ~"::"); } @@ -1599,12 +1603,12 @@ fn print_fn_args(s: ps, decl: ast::fn_decl, box(s, 0u, inconsistent); let mut first = true; for opt_self_ty.each |self_ty| { - first = !print_self_ty(s, self_ty); + first = !print_self_ty(s, *self_ty); } for decl.inputs.each |arg| { if first { first = false; } else { 
word_space(s, ~","); } - print_arg(s, arg); + print_arg(s, *arg); } for cap_items.each |cap_item| { @@ -1836,11 +1840,11 @@ fn print_ty_fn(s: ps, opt_proto: Option<ast::proto>, purity: ast::purity, box(s, 0u, inconsistent); let mut first = true; for opt_self_ty.each |self_ty| { - first = !print_self_ty(s, self_ty); + first = !print_self_ty(s, *self_ty); } for decl.inputs.each |arg| { if first { first = false; } else { word_space(s, ~","); } - print_arg(s, arg); + print_arg(s, *arg); } end(s); pclose(s); @@ -1988,7 +1992,7 @@ fn print_comment(s: ps, cmnt: comments::cmnt) { for cmnt.lines.each |line| { // Don't print empty lines because they will end up as trailing // whitespace - if str::is_not_empty(line) { word(s.s, line); } + if str::is_not_empty(*line) { word(s.s, *line); } hardbreak(s.s); } } @@ -2000,7 +2004,7 @@ fn print_comment(s: ps, cmnt: comments::cmnt) { } else { ibox(s, 0u); for cmnt.lines.each |line| { - if str::is_not_empty(line) { word(s.s, line); } + if str::is_not_empty(*line) { word(s.s, *line); } hardbreak(s.s); } end(s); diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 1a832bf5af0..15b9e34566f 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -21,7 +21,7 @@ fn mk<T:Eq IterBytes Hash Const Copy>() -> interner<T> { fn mk_prefill<T:Eq IterBytes Hash Const Copy>(init: ~[T]) -> interner<T> { let rv = mk(); - for init.each() |v| { rv.intern(v); } + for init.each() |v| { rv.intern(*v); } return rv; } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 4c48a2bea70..4392ed55219 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -98,7 +98,7 @@ fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) { match cd.node { cdir_src_mod(_, _) => (), cdir_dir_mod(_, cdirs, _) => for cdirs.each |cdir| { - visit_crate_directive(cdir, e, v); + visit_crate_directive(*cdir, e, v); }, cdir_view_item(vi) => v.visit_view_item(vi, e, v), cdir_syntax(_) => () @@ -106,8 +106,8 @@ fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) { } fn visit_mod<E>(m: _mod, _sp: span, _id: node_id, e: E, v: vt<E>) { - for m.view_items.each |vi| { v.visit_view_item(vi, e, v); } - for m.items.each |i| { v.visit_item(i, e, v); } + for m.view_items.each |vi| { v.visit_view_item(*vi, e, v); } + for m.items.each |i| { v.visit_item(*i, e, v); } } fn visit_view_item<E>(_vi: @view_item, _e: E, _v: vt<E>) { } @@ -132,8 +132,8 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) { } item_mod(m) => v.visit_mod(m, i.span, i.id, e, v), item_foreign_mod(nm) => { - for nm.view_items.each |vi| { v.visit_view_item(vi, e, v); } - for nm.items.each |ni| { v.visit_foreign_item(ni, e, v); } + for nm.view_items.each |vi| { v.visit_view_item(*vi, e, v); } + for nm.items.each |ni| { v.visit_foreign_item(*ni, e, v); } } item_ty(t, tps) => { v.visit_ty(t, e, v); @@ -150,7 +150,7 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) { } v.visit_ty(ty, e, v); for methods.each |m| { - visit_method_helper(m, e, v) + visit_method_helper(*m, e, v) } } item_class(struct_def, tps) => { @@ -161,7 +161,7 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) { v.visit_ty_params(tps, e, v); for traits.each |p| { visit_path(p.path, e, v); } for methods.each |m| { - v.visit_trait_method(m, e, v); + v.visit_trait_method(*m, e, v); } } item_mac(m) => visit_mac(m, e, v) @@ -198,7 +198,7 @@ fn visit_ty<E>(t: @ty, e: E, v: vt<E>) { v.visit_ty(f.node.mt.ty, e, v); }, ty_tup(ts) => for ts.each |tt| { - v.visit_ty(tt, e, v); + v.visit_ty(*tt, e, v); }, ty_fn(_, 
_, bounds, decl) => { for decl.inputs.each |a| { v.visit_ty(a.ty, e, v); } @@ -215,7 +215,7 @@ fn visit_ty<E>(t: @ty, e: E, v: vt<E>) { } fn visit_path<E>(p: @path, e: E, v: vt<E>) { - for p.types.each |tp| { v.visit_ty(tp, e, v); } + for p.types.each |tp| { v.visit_ty(*tp, e, v); } } fn visit_pat<E>(p: @pat, e: E, v: vt<E>) { @@ -223,7 +223,7 @@ fn visit_pat<E>(p: @pat, e: E, v: vt<E>) { pat_enum(path, children) => { visit_path(path, e, v); do option::iter(children) |children| { - for children.each |child| { v.visit_pat(child, e, v); }} + for children.each |child| { v.visit_pat(*child, e, v); }} } pat_rec(fields, _) => for fields.each |f| { v.visit_pat(f.pat, e, v) @@ -235,7 +235,7 @@ fn visit_pat<E>(p: @pat, e: E, v: vt<E>) { } } pat_tup(elts) => for elts.each |elt| { - v.visit_pat(elt, e, v) + v.visit_pat(*elt, e, v) }, pat_box(inner) | pat_uniq(inner) | pat_region(inner) => v.visit_pat(inner, e, v), @@ -333,12 +333,14 @@ fn visit_trait_method<E>(m: trait_method, e: E, v: vt<E>) { fn visit_struct_def<E>(sd: @struct_def, nm: ast::ident, tps: ~[ty_param], id: node_id, e: E, v: vt<E>) { for sd.fields.each |f| { - v.visit_struct_field(f, e, v); + v.visit_struct_field(*f, e, v); } for sd.methods.each |m| { - v.visit_struct_method(m, e, v); + v.visit_struct_method(*m, e, v); + } + for sd.traits.each |p| { + visit_path(p.path, e, v); } - for sd.traits.each |p| { visit_path(p.path, e, v); } do option::iter(sd.ctor) |ctor| { visit_class_ctor_helper(ctor, nm, tps, ast_util::local_def(id), e, v); }; @@ -356,8 +358,12 @@ fn visit_struct_method<E>(m: @method, e: E, v: vt<E>) { } fn visit_block<E>(b: ast::blk, e: E, v: vt<E>) { - for b.node.view_items.each |vi| { v.visit_view_item(vi, e, v); } - for b.node.stmts.each |s| { v.visit_stmt(s, e, v); } + for b.node.view_items.each |vi| { + v.visit_view_item(*vi, e, v); + } + for b.node.stmts.each |s| { + v.visit_stmt(*s, e, v); + } visit_expr_opt(b.node.expr, e, v); } @@ -372,7 +378,7 @@ fn visit_stmt<E>(s: @stmt, e: E, v: vt<E>) { fn visit_decl<E>(d: @decl, e: E, v: vt<E>) { match d.node { decl_local(locs) => for locs.each |loc| { - v.visit_local(loc, e, v) + v.visit_local(*loc, e, v) }, decl_item(it) => v.visit_item(it, e, v) } @@ -383,7 +389,7 @@ fn visit_expr_opt<E>(eo: Option<@expr>, e: E, v: vt<E>) { } fn visit_exprs<E>(exprs: ~[@expr], e: E, v: vt<E>) { - for exprs.each |ex| { v.visit_expr(ex, e, v); } + for exprs.each |ex| { v.visit_expr(*ex, e, v); } } fn visit_mac<E>(m: mac, e: E, v: vt<E>) { @@ -414,7 +420,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) { for flds.each |f| { v.visit_expr(f.node.expr, e, v); } visit_expr_opt(base, e, v); } - expr_tup(elts) => for elts.each |el| { v.visit_expr(el, e, v); }, + expr_tup(elts) => for elts.each |el| { v.visit_expr(*el, e, v); }, expr_call(callee, args, _) => { visit_exprs(args, e, v); v.visit_expr(callee, e, v); @@ -436,7 +442,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) { expr_loop(b, _) => v.visit_block(b, e, v), expr_match(x, arms) => { v.visit_expr(x, e, v); - for arms.each |a| { v.visit_arm(a, e, v); } + for arms.each |a| { v.visit_arm(*a, e, v); } } expr_fn(proto, decl, body, cap_clause) => { v.visit_fn(fk_anon(proto, cap_clause), decl, body, @@ -458,7 +464,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) { } expr_field(x, _, tys) => { v.visit_expr(x, e, v); - for tys.each |tp| { v.visit_ty(tp, e, v); } + for tys.each |tp| { v.visit_ty(*tp, e, v); } } expr_index(a, b) => { v.visit_expr(a, e, v); v.visit_expr(b, e, v); } expr_path(p) => visit_path(p, e, v), @@ -476,7 +482,7 @@ fn 
visit_expr<E>(ex: @expr, e: E, v: vt<E>) { } fn visit_arm<E>(a: arm, e: E, v: vt<E>) { - for a.pats.each |p| { v.visit_pat(p, e, v); } + for a.pats.each |p| { v.visit_pat(*p, e, v); } visit_expr_opt(a.guard, e, v); v.visit_block(a.body, e, v); } diff --git a/src/rustc/back/link.rs b/src/rustc/back/link.rs index c56cf30d689..374e56b94fd 100644 --- a/src/rustc/back/link.rs +++ b/src/rustc/back/link.rs @@ -400,17 +400,17 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path, let linkage_metas = attr::find_linkage_metas(c.node.attrs); attr::require_unique_names(sess.diagnostic(), linkage_metas); for linkage_metas.each |meta| { - if attr::get_meta_item_name(meta) == ~"name" { - match attr::get_meta_item_value_str(meta) { + if attr::get_meta_item_name(*meta) == ~"name" { + match attr::get_meta_item_value_str(*meta) { Some(v) => { name = Some(v); } - None => vec::push(cmh_items, meta) + None => vec::push(cmh_items, *meta) } - } else if attr::get_meta_item_name(meta) == ~"vers" { - match attr::get_meta_item_value_str(meta) { + } else if attr::get_meta_item_name(*meta) == ~"vers" { + match attr::get_meta_item_value_str(*meta) { Some(v) => { vers = Some(v); } - None => vec::push(cmh_items, meta) + None => vec::push(cmh_items, *meta) } - } else { vec::push(cmh_items, meta); } + } else { vec::push(cmh_items, *meta); } } return {name: name, vers: vers, cmh_items: cmh_items}; } @@ -431,8 +431,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path, let cmh_items = attr::sort_meta_items(metas.cmh_items); symbol_hasher.reset(); - for cmh_items.each |m_| { - let m = m_; + for cmh_items.each |m| { match m.node { ast::meta_name_value(key, value) => { symbol_hasher.write_str(len_and_str(key)); @@ -449,7 +448,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path, } for dep_hashes.each |dh| { - symbol_hasher.write_str(len_and_str(dh)); + symbol_hasher.write_str(len_and_str(*dh)); } return truncated_hash_result(symbol_hasher); @@ -576,7 +575,7 @@ fn mangle(sess: session, ss: path) -> ~str { let mut n = ~"_ZN"; // Begin name-sequence. 
for ss.each |s| { - match s { path_name(s) | path_mod(s) => { + match *s { path_name(s) | path_mod(s) => { let sani = sanitize(sess.str_of(s)); n += fmt!("%u%s", str::len(sani), sani); } } @@ -696,7 +695,7 @@ fn link_binary(sess: session, } let ula = cstore::get_used_link_args(cstore); - for ula.each |arg| { vec::push(cc_args, arg); } + for ula.each |arg| { vec::push(cc_args, *arg); } // # Extern library linking @@ -711,7 +710,7 @@ fn link_binary(sess: session, // The names of the extern libraries let used_libs = cstore::get_used_libraries(cstore); - for used_libs.each |l| { vec::push(cc_args, ~"-l" + l); } + for used_libs.each |l| { vec::push(cc_args, ~"-l" + *l); } if sess.building_library { vec::push(cc_args, lib_cmd); diff --git a/src/rustc/back/rpath.rs b/src/rustc/back/rpath.rs index 5dcc4a5eca3..5af2987cb69 100644 --- a/src/rustc/back/rpath.rs +++ b/src/rustc/back/rpath.rs @@ -172,7 +172,7 @@ fn minimize_rpaths(rpaths: &[Path]) -> ~[Path] { for rpaths.each |rpath| { let s = rpath.to_str(); if !set.contains_key(s) { - vec::push(minimized, rpath); + vec::push(minimized, *rpath); set.insert(s, ()); } } diff --git a/src/rustc/back/upcall.rs b/src/rustc/back/upcall.rs index 7adadb5d810..f289a0bdf27 100644 --- a/src/rustc/back/upcall.rs +++ b/src/rustc/back/upcall.rs @@ -28,7 +28,7 @@ fn declare_upcalls(targ_cfg: @session::config, tys: ~[TypeRef], rv: TypeRef) -> ValueRef { let mut arg_tys: ~[TypeRef] = ~[]; - for tys.each |t| { vec::push(arg_tys, t); } + for tys.each |t| { vec::push(arg_tys, *t); } let fn_ty = T_fn(arg_tys, rv); return base::decl_cdecl_fn(llmod, prefix + name, fn_ty); } diff --git a/src/rustc/driver/driver.rs b/src/rustc/driver/driver.rs index abaee09790e..7ccad3b717d 100644 --- a/src/rustc/driver/driver.rs +++ b/src/rustc/driver/driver.rs @@ -93,7 +93,9 @@ fn parse_cfgspecs(cfgspecs: ~[~str]) -> ast::crate_cfg { // varieties of meta_item here. At the moment we just support the // meta_word variant. 
let mut words = ~[]; - for cfgspecs.each |s| { vec::push(words, attr::mk_word_item(s)); } + for cfgspecs.each |s| { + vec::push(words, attr::mk_word_item(*s)); + } return words; } @@ -453,19 +455,19 @@ fn build_session_options(binary: ~str, let mut lint_opts = ~[]; let lint_dict = lint::get_lint_dict(); for lint_levels.each |level| { - let level_name = lint::level_to_str(level); + let level_name = lint::level_to_str(*level); let level_short = level_name.substr(0,1).to_upper(); let flags = vec::append(getopts::opt_strs(matches, level_short), getopts::opt_strs(matches, level_name)); for flags.each |lint_name| { - let lint_name = str::replace(lint_name, ~"-", ~"_"); + let lint_name = str::replace(*lint_name, ~"-", ~"_"); match lint_dict.find(lint_name) { None => { early_error(demitter, fmt!("unknown %s flag: %s", level_name, lint_name)); } Some(lint) => { - vec::push(lint_opts, (lint.lint, level)); + vec::push(lint_opts, (lint.lint, *level)); } } } @@ -477,11 +479,11 @@ fn build_session_options(binary: ~str, for debug_flags.each |debug_flag| { let mut this_bit = 0u; for debug_map.each |pair| { - let (name, _, bit) = pair; - if name == debug_flag { this_bit = bit; break; } + let (name, _, bit) = *pair; + if name == *debug_flag { this_bit = bit; break; } } if this_bit == 0u { - early_error(demitter, fmt!("unknown debug flag: %s", debug_flag)) + early_error(demitter, fmt!("unknown debug flag: %s", *debug_flag)) } debugging_opts |= this_bit; } diff --git a/src/rustc/driver/rustc.rs b/src/rustc/driver/rustc.rs index 56eb6fe762c..d34d92b1534 100644 --- a/src/rustc/driver/rustc.rs +++ b/src/rustc/driver/rustc.rs @@ -111,7 +111,7 @@ fn describe_warnings() { fn describe_debug_flags() { io::println(fmt!("\nAvailable debug options:\n")); for session::debugging_opts_map().each |pair| { - let (name, desc, _) = pair; + let (name, desc, _) = *pair; io::println(fmt!(" -Z%-20s -- %s", name, desc)); } } @@ -267,7 +267,7 @@ fn monitor(+f: fn~(diagnostic::emitter)) { to get further details and report the results \ to github.com/mozilla/rust/issues" ]/_.each |note| { - diagnostic::emit(None, note, diagnostic::note) + diagnostic::emit(None, *note, diagnostic::note) } } // Fail so the process returns a failure code diff --git a/src/rustc/front/config.rs b/src/rustc/front/config.rs index ac970faf954..7599d94e0f6 100644 --- a/src/rustc/front/config.rs +++ b/src/rustc/front/config.rs @@ -142,7 +142,7 @@ fn metas_in_cfg(cfg: ast::crate_cfg, metas: ~[@ast::meta_item]) -> bool { if !has_cfg_metas { return true; } for cfg_metas.each |cfg_mi| { - if attr::contains(cfg, cfg_mi) { return true; } + if attr::contains(cfg, *cfg_mi) { return true; } } return false; diff --git a/src/rustc/front/test.rs b/src/rustc/front/test.rs index 3d6dde880ef..27c070cd201 100644 --- a/src/rustc/front/test.rs +++ b/src/rustc/front/test.rs @@ -285,7 +285,7 @@ fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr { debug!("building test vector from %u tests", cx.testfns.len()); let mut descs = ~[]; for cx.testfns.each |test| { - vec::push(descs, mk_test_desc_rec(cx, test)); + vec::push(descs, mk_test_desc_rec(cx, *test)); } let inner_expr = @{id: cx.sess.next_node_id(), diff --git a/src/rustc/lib/llvm.rs b/src/rustc/lib/llvm.rs index 4ec6f36a8eb..b6ee29b8a1b 100644 --- a/src/rustc/lib/llvm.rs +++ b/src/rustc/lib/llvm.rs @@ -1080,7 +1080,7 @@ fn type_to_str_inner(names: type_names, outer0: ~[TypeRef], ty: TypeRef) -> let mut first: bool = true; for tys.each |t| { if first { first = false; } else { s += ~", "; } - s += type_to_str_inner(names, outer, 
t); + s += type_to_str_inner(names, outer, *t); } return s; } @@ -1130,7 +1130,7 @@ fn type_to_str_inner(names: type_names, outer0: ~[TypeRef], ty: TypeRef) -> let mut i: uint = 0u; for outer0.each |tout| { i += 1u; - if tout as int == ty as int { + if *tout as int == ty as int { let n: uint = vec::len::<TypeRef>(outer0) - i; return ~"*\\" + int::str(n as int); } diff --git a/src/rustc/metadata/creader.rs b/src/rustc/metadata/creader.rs index 017dbf6836f..03e15487ab9 100644 --- a/src/rustc/metadata/creader.rs +++ b/src/rustc/metadata/creader.rs @@ -150,7 +150,7 @@ fn visit_item(e: env, i: @ast::item) { } for link_args.each |a| { - match attr::get_meta_item_value_str(attr::attr_meta(a)) { + match attr::get_meta_item_value_str(attr::attr_meta(*a)) { Some(linkarg) => { cstore::add_used_link_args(cstore, linkarg); } diff --git a/src/rustc/metadata/decoder.rs b/src/rustc/metadata/decoder.rs index cf76cf110e9..61cf9646762 100644 --- a/src/rustc/metadata/decoder.rs +++ b/src/rustc/metadata/decoder.rs @@ -595,7 +595,7 @@ fn get_enum_variants(intr: ident_interner, cdata: cmd, id: ast::node_id, _ => { /* empty */ } } vec::push(infos, @{args: arg_tys, ctor_ty: ctor_ty, name: name, - id: did, disr_val: disr_val}); + id: *did, disr_val: disr_val}); disr_val += 1; } return infos; @@ -881,7 +881,7 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::attribute] { fn list_meta_items(intr: ident_interner, meta_items: ebml::Doc, out: io::Writer) { for get_meta_items(meta_items).each |mi| { - out.write_str(fmt!("%s\n", pprust::meta_item_to_str(mi, intr))); + out.write_str(fmt!("%s\n", pprust::meta_item_to_str(*mi, intr))); } } @@ -890,7 +890,7 @@ fn list_crate_attributes(intr: ident_interner, md: ebml::Doc, hash: ~str, out.write_str(fmt!("=Crate Attributes (%s)=\n", hash)); for get_attributes(md).each |attr| { - out.write_str(fmt!("%s\n", pprust::attribute_to_str(attr, intr))); + out.write_str(fmt!("%s\n", pprust::attribute_to_str(*attr, intr))); } out.write_str(~"\n\n"); diff --git a/src/rustc/metadata/encoder.rs b/src/rustc/metadata/encoder.rs index 04f2bdcf636..1b9fc64dd86 100644 --- a/src/rustc/metadata/encoder.rs +++ b/src/rustc/metadata/encoder.rs @@ -100,7 +100,7 @@ fn encode_region_param(ecx: @encode_ctxt, ebml_w: ebml::Writer, let opt_rp = ecx.tcx.region_paramd_items.find(it.id); for opt_rp.each |rp| { do ebml_w.wr_tag(tag_region_param) { - ty::serialize_region_variance(ebml_w, rp); + ty::serialize_region_variance(ebml_w, *rp); } } } @@ -153,7 +153,7 @@ fn encode_ty_type_param_bounds(ebml_w: ebml::Writer, ecx: @encode_ctxt, abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)}; for params.each |param| { ebml_w.start_tag(tag_items_data_item_ty_param_bounds); - tyencode::enc_bounds(ebml_w.writer, ty_str_ctxt, param); + tyencode::enc_bounds(ebml_w.writer, ty_str_ctxt, *param); ebml_w.end_tag(); } } @@ -421,7 +421,7 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::Writer, debug!("encode_info_for_class: doing %s %d", ecx.tcx.sess.str_of(m.ident), m.id); encode_info_for_method(ecx, ebml_w, impl_path, - should_inline(m.attrs), id, m, + should_inline(m.attrs), id, *m, vec::append(class_tps, m.tps)); } _ => { /* don't encode private methods */ } @@ -633,7 +633,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item, encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); encode_region_param(ecx, ebml_w, item); for struct_def.traits.each |t| { - encode_trait_ref(ebml_w, ecx, t); + encode_trait_ref(ebml_w, ecx, *t); } /* Encode the dtor */ /* Encode id for dtor */ @@ -696,7 
+696,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item, }); encode_info_for_ctor(ecx, ebml_w, ctor.node.id, item.ident, path, if tps.len() > 0u { - Some(ii_ctor(ctor, item.ident, tps, + Some(ii_ctor(*ctor, item.ident, tps, local_def(item.id))) } else { None }, tps); } @@ -727,7 +727,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item, for methods.each |m| { vec::push(*index, {val: m.id, pos: ebml_w.writer.tell()}); encode_info_for_method(ecx, ebml_w, impl_path, - should_inline(m.attrs), item.id, m, + should_inline(m.attrs), item.id, *m, vec::append(tps, m.tps)); } } @@ -764,7 +764,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item, } encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); for traits.each |associated_trait| { - encode_trait_ref(ebml_w, ecx, associated_trait) + encode_trait_ref(ebml_w, ecx, *associated_trait) } ebml_w.end_tag(); @@ -874,12 +874,12 @@ fn create_index<T: Copy Hash IterBytes>(index: ~[entry<T>]) -> for uint::range(0u, 256u) |_i| { vec::push(buckets, @mut ~[]); }; for index.each |elt| { let h = elt.val.hash() as uint; - vec::push(*buckets[h % 256], elt); + vec::push(*buckets[h % 256], *elt); } let mut buckets_frozen = ~[]; for buckets.each |bucket| { - vec::push(buckets_frozen, @*bucket); + vec::push(buckets_frozen, @**bucket); } return buckets_frozen; } @@ -893,7 +893,7 @@ fn encode_index<T>(ebml_w: ebml::Writer, buckets: ~[@~[entry<T>]], for buckets.each |bucket| { vec::push(bucket_locs, ebml_w.writer.tell()); ebml_w.start_tag(tag_index_buckets_bucket); - for vec::each(*bucket) |elt| { + for vec::each(**bucket) |elt| { ebml_w.start_tag(tag_index_buckets_bucket_elt); assert elt.pos < 0xffff_ffff; writer.write_be_u32(elt.pos as u32); @@ -905,8 +905,8 @@ fn encode_index<T>(ebml_w: ebml::Writer, buckets: ~[@~[entry<T>]], ebml_w.end_tag(); ebml_w.start_tag(tag_index_table); for bucket_locs.each |pos| { - assert pos < 0xffff_ffff; - writer.write_be_u32(pos as u32); + assert *pos < 0xffff_ffff; + writer.write_be_u32(*pos as u32); } ebml_w.end_tag(); ebml_w.end_tag(); @@ -949,7 +949,7 @@ fn encode_meta_item(ebml_w: ebml::Writer, mi: meta_item) { ebml_w.writer.write(str::to_bytes(name)); ebml_w.end_tag(); for items.each |inner_item| { - encode_meta_item(ebml_w, *inner_item); + encode_meta_item(ebml_w, **inner_item); } ebml_w.end_tag(); } @@ -1000,15 +1000,15 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] { for crate.node.attrs.each |attr| { vec::push( attrs, - if attr::get_attr_name(attr) != ~"link" { - attr + if attr::get_attr_name(*attr) != ~"link" { + *attr } else { match attr.node.value.node { meta_list(_, l) => { found_link_attr = true;; synthesize_link_attr(ecx, l) } - _ => attr + _ => *attr } }); } @@ -1059,7 +1059,7 @@ fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: ebml::Writer, // but is enough to get transitive crate dependencies working. 
ebml_w.start_tag(tag_crate_deps); for get_ordered_deps(ecx, cstore).each |dep| { - encode_crate_dep(ecx, ebml_w, dep); + encode_crate_dep(ecx, ebml_w, *dep); } ebml_w.end_tag(); } @@ -1149,7 +1149,7 @@ fn encode_metadata(parms: encode_parms, crate: @crate) -> ~[u8] { do wr.buf.borrow |v| { do v.each |e| { - if e == 0 { + if *e == 0 { ecx.stats.zero_bytes += 1; } true diff --git a/src/rustc/metadata/filesearch.rs b/src/rustc/metadata/filesearch.rs index 0384225de12..25abcb1b7eb 100644 --- a/src/rustc/metadata/filesearch.rs +++ b/src/rustc/metadata/filesearch.rs @@ -71,9 +71,9 @@ fn search<T: Copy>(filesearch: filesearch, pick: pick<T>) -> Option<T> { let mut rslt = None; for filesearch.lib_search_paths().each |lib_search_path| { debug!("searching %s", lib_search_path.to_str()); - for os::list_dir_path(&lib_search_path).each |path| { + for os::list_dir_path(lib_search_path).each |path| { debug!("testing %s", path.to_str()); - let maybe_picked = pick(path); + let maybe_picked = pick(*path); if option::is_some(maybe_picked) { debug!("picked %s", path.to_str()); rslt = maybe_picked; diff --git a/src/rustc/metadata/loader.rs b/src/rustc/metadata/loader.rs index 592fb5bdf49..c45b189d697 100644 --- a/src/rustc/metadata/loader.rs +++ b/src/rustc/metadata/loader.rs @@ -139,7 +139,7 @@ fn note_linkage_attrs(intr: ident_interner, diag: span_handler, attrs: ~[ast::attribute]) { for attr::find_linkage_metas(attrs).each |mi| { diag.handler().note(fmt!("meta: %s", - pprust::meta_item_to_str(mi,intr))); + pprust::meta_item_to_str(*mi,intr))); } } @@ -161,7 +161,7 @@ fn metadata_matches(extern_metas: ~[@ast::meta_item], vec::len(local_metas), vec::len(extern_metas)); for local_metas.each |needed| { - if !attr::contains(extern_metas, needed) { + if !attr::contains(extern_metas, *needed) { return false; } } diff --git a/src/rustc/metadata/tyencode.rs b/src/rustc/metadata/tyencode.rs index df67749bc08..88d83ca23f4 100644 --- a/src/rustc/metadata/tyencode.rs +++ b/src/rustc/metadata/tyencode.rs @@ -120,7 +120,7 @@ fn enc_substs(w: io::Writer, cx: @ctxt, substs: ty::substs) { do enc_opt(w, substs.self_r) |r| { enc_region(w, cx, r) } do enc_opt(w, substs.self_ty) |t| { enc_ty(w, cx, t) } w.write_char('['); - for substs.tps.each |t| { enc_ty(w, cx, t); } + for substs.tps.each |t| { enc_ty(w, cx, *t); } w.write_char(']'); } @@ -243,7 +243,7 @@ fn enc_sty(w: io::Writer, cx: @ctxt, st: ty::sty) { } ty::ty_tup(ts) => { w.write_str(&"T["); - for ts.each |t| { enc_ty(w, cx, t); } + for ts.each |t| { enc_ty(w, cx, *t); } w.write_char(']'); } ty::ty_box(mt) => { w.write_char('@'); enc_mt(w, cx, mt); } @@ -355,7 +355,7 @@ fn enc_ty_fn(w: io::Writer, cx: @ctxt, ft: ty::FnTy) { enc_bounds(w, cx, ft.meta.bounds); w.write_char('['); for ft.sig.inputs.each |arg| { - enc_arg(w, cx, arg); + enc_arg(w, cx, *arg); } w.write_char(']'); match ft.meta.ret_style { diff --git a/src/rustc/middle/borrowck/check_loans.rs b/src/rustc/middle/borrowck/check_loans.rs index 839c2f55894..d0c722cab10 100644 --- a/src/rustc/middle/borrowck/check_loans.rs +++ b/src/rustc/middle/borrowck/check_loans.rs @@ -137,15 +137,15 @@ impl check_loan_ctxt { } fn walk_loans(scope_id: ast::node_id, - f: fn(loan) -> bool) { + f: fn(v: &loan) -> bool) { let mut scope_id = scope_id; let region_map = self.tcx().region_map; let req_loan_map = self.req_maps.req_loan_map; loop { for req_loan_map.find(scope_id).each |loanss| { - for (*loanss).each |loans| { - for (*loans).each |loan| { + for loanss.each |loans| { + for loans.each |loan| { if !f(loan) { return; } } } @@ 
-160,7 +160,7 @@ impl check_loan_ctxt { fn walk_loans_of(scope_id: ast::node_id, lp: @loan_path, - f: fn(loan) -> bool) { + f: fn(v: &loan) -> bool) { for self.walk_loans(scope_id) |loan| { if loan.lp == lp { if !f(loan) { return; } @@ -268,8 +268,8 @@ impl check_loan_ctxt { let par_scope_id = self.tcx().region_map.get(scope_id); for self.walk_loans(par_scope_id) |old_loan| { - for (*new_loanss).each |new_loans| { - for (*new_loans).each |new_loan| { + for new_loanss.each |new_loans| { + for new_loans.each |new_loan| { if old_loan.lp != new_loan.lp { loop; } match (old_loan.mutbl, new_loan.mutbl) { (m_const, _) | (_, m_const) | @@ -368,7 +368,7 @@ impl check_loan_ctxt { // check_for_conflicting_loans() for cmt.lp.each |lp| { self.check_for_loan_conflicting_with_assignment( - at, ex, cmt, lp); + at, ex, cmt, *lp); } self.bccx.add_to_mutbl_map(cmt); @@ -517,7 +517,7 @@ impl check_loan_ctxt { pc, callee, callee_id, callee_span); for args.each |arg| { self.check_pure_callee_or_arg( - pc, Some(arg), arg.id, arg.span); + pc, Some(*arg), arg.id, arg.span); } } } diff --git a/src/rustc/middle/borrowck/gather_loans.rs b/src/rustc/middle/borrowck/gather_loans.rs index 167e19152fa..b3f846d47fd 100644 --- a/src/rustc/middle/borrowck/gather_loans.rs +++ b/src/rustc/middle/borrowck/gather_loans.rs @@ -95,7 +95,7 @@ fn req_loans_in_expr(ex: @ast::expr, // If this expression is borrowed, have to ensure it remains valid: for tcx.adjustments.find(ex.id).each |adjustments| { - self.guarantee_adjustments(ex, adjustments); + self.guarantee_adjustments(ex, *adjustments); } // Special checks for various kinds of expressions: @@ -138,7 +138,7 @@ fn req_loans_in_expr(ex: @ast::expr, let cmt = self.bccx.cat_expr(ex_v); for arms.each |arm| { for arm.pats.each |pat| { - self.gather_pat(cmt, pat, arm.body.node.id, ex.id); + self.gather_pat(cmt, *pat, arm.body.node.id, ex.id); } } visit::visit_expr(ex, self, vt); diff --git a/src/rustc/middle/capture.rs b/src/rustc/middle/capture.rs index 25c5b16194d..1e885605171 100644 --- a/src/rustc/middle/capture.rs +++ b/src/rustc/middle/capture.rs @@ -78,12 +78,12 @@ fn compute_capture_vars(tcx: ty::ctxt, if vec::any(*freevars, |fv| fv.def == cap_def ) { cap_map.insert(cap_def_id, {def:cap_def, span: cap_item.span, - cap_item: Some(cap_item), + cap_item: Some(*cap_item), mode:cap_move}); } else { cap_map.insert(cap_def_id, {def:cap_def, span: cap_item.span, - cap_item: Some(cap_item), + cap_item: Some(*cap_item), mode:cap_drop}); } } else { @@ -92,7 +92,7 @@ fn compute_capture_vars(tcx: ty::ctxt, if vec::any(*freevars, |fv| fv.def == cap_def ) { cap_map.insert(cap_def_id, {def:cap_def, span: cap_item.span, - cap_item: Some(cap_item), + cap_item: Some(*cap_item), mode:cap_copy}); } } diff --git a/src/rustc/middle/check_alt.rs b/src/rustc/middle/check_alt.rs index 79dbcecbec4..9e1c8886353 100644 --- a/src/rustc/middle/check_alt.rs +++ b/src/rustc/middle/check_alt.rs @@ -60,7 +60,7 @@ fn check_arms(tcx: ty::ctxt, arms: ~[arm]) { let mut seen = ~[]; for arms.each |arm| { for arm.pats.each |pat| { - let v = ~[pat]; + let v = ~[*pat]; match is_useful(tcx, seen, v) { not_useful => { tcx.sess.span_err(pat.span, ~"unreachable pattern"); @@ -449,24 +449,16 @@ fn is_refutable(tcx: ty::ctxt, pat: @pat) -> bool { pat_lit(@{node: expr_lit(@{node: lit_nil, _}), _}) => { false } // "()" pat_lit(_) | pat_range(_, _) => { true } pat_rec(fields, _) => { - for fields.each |it| { - if is_refutable(tcx, it.pat) { return true; } - } - false + fields.any(|f| is_refutable(tcx, f.pat)) } pat_struct(_, 
fields, _) => { - for fields.each |it| { - if is_refutable(tcx, it.pat) { return true; } - } - false + fields.any(|f| is_refutable(tcx, f.pat)) } pat_tup(elts) => { - for elts.each |elt| { if is_refutable(tcx, elt) { return true; } } - false + elts.any(|elt| is_refutable(tcx, elt)) } pat_enum(_, Some(args)) => { - for args.each |p| { if is_refutable(tcx, p) { return true; } }; - false + args.any(|a| is_refutable(tcx, a)) } pat_enum(_,_) => { false } } diff --git a/src/rustc/middle/kind.rs b/src/rustc/middle/kind.rs index cafecf52761..b5bd685d835 100644 --- a/src/rustc/middle/kind.rs +++ b/src/rustc/middle/kind.rs @@ -316,12 +316,12 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) { } } expr_tup(exprs) | expr_vec(exprs, _) => { - for exprs.each |expr| { maybe_copy(cx, expr, None); } + for exprs.each |expr| { maybe_copy(cx, *expr, None); } } expr_call(f, args, _) => { let mut i = 0u; for ty::ty_fn_args(ty::expr_ty(cx.tcx, f)).each |arg_t| { - match ty::arg_mode(cx.tcx, arg_t) { + match ty::arg_mode(cx.tcx, *arg_t) { by_copy => maybe_copy(cx, args[i], None), by_ref | by_val | by_mutbl_ref | by_move => () } diff --git a/src/rustc/middle/lang_items.rs b/src/rustc/middle/lang_items.rs index 6e12475cd9d..e33ff5dcf50 100644 --- a/src/rustc/middle/lang_items.rs +++ b/src/rustc/middle/lang_items.rs @@ -208,7 +208,7 @@ impl LanguageItemCollector { do get_item_attrs(crate_store, def_id) |meta_items| { for meta_items.each |meta_item| { - self.match_and_collect_meta_item(def_id, *meta_item); + self.match_and_collect_meta_item(def_id, **meta_item); } } } @@ -217,7 +217,7 @@ impl LanguageItemCollector { fn check_completeness() { for self.item_refs.each |key, item_ref| { - match copy *item_ref { + match *item_ref { None => { self.session.err(fmt!("no item found for `%s`", key)); } diff --git a/src/rustc/middle/lint.rs b/src/rustc/middle/lint.rs index 630be4c6f6b..7ffc0fcf090 100644 --- a/src/rustc/middle/lint.rs +++ b/src/rustc/middle/lint.rs @@ -278,7 +278,7 @@ impl ctxt { let mut triples = ~[]; for [allow, warn, deny, forbid].each |level| { - let level_name = level_to_str(level); + let level_name = level_to_str(*level); let metas = attr::attr_metas(attr::find_attrs_by_name(attrs, level_name)); @@ -288,7 +288,7 @@ impl ctxt { for metas.each |meta| { match meta.node { ast::meta_word(lintname) => { - vec::push(triples, (meta, level, lintname)); + vec::push(triples, (*meta, *level, lintname)); } _ => { self.sess.span_err( @@ -307,7 +307,7 @@ impl ctxt { } for triples.each |pair| { - let (meta, level, lintname) = pair; + let (meta, level, lintname) = *pair; match self.dict.find(lintname) { None => { self.span_lint( @@ -367,7 +367,7 @@ fn build_settings_crate(sess: session::session, crate: @ast::crate) { // Install command-line options, overriding defaults. 
for sess.opts.lint_opts.each |pair| { - let (lint,level) = pair; + let (lint,level) = *pair; cx.set_level(lint, level); } @@ -534,7 +534,7 @@ fn check_item_heap(cx: ty::ctxt, it: @ast::item) { for [managed_heap_memory, owned_heap_memory, heap_memory].each |lint| { - check_type_for_lint(cx, lint, node, item, span, ty); + check_type_for_lint(cx, *lint, node, item, span, ty); } } diff --git a/src/rustc/middle/liveness.rs b/src/rustc/middle/liveness.rs index 6a50bdf6698..18480ee483f 100644 --- a/src/rustc/middle/liveness.rs +++ b/src/rustc/middle/liveness.rs @@ -495,7 +495,7 @@ fn visit_local(local: @local, &&self: @IrMaps, vt: vt<@IrMaps>) { fn visit_arm(arm: arm, &&self: @IrMaps, vt: vt<@IrMaps>) { let def_map = self.tcx.def_map; for arm.pats.each |pat| { - do pat_util::pat_bindings(def_map, pat) |bm, p_id, sp, path| { + do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| { debug!("adding local variable %d from match with bm %?", p_id, bm); let name = ast_util::path_to_ident(path); @@ -1524,7 +1524,7 @@ fn check_expr(expr: @expr, &&self: @Liveness, vt: vt<@Liveness>) { expr_path(_) => { for self.variable_from_def_map(expr.id, expr.span).each |var| { let ln = self.live_node(expr.id, expr.span); - self.consider_last_use(expr, ln, var); + self.consider_last_use(expr, ln, *var); } visit::visit_expr(expr, self, vt); @@ -1911,11 +1911,11 @@ impl @Liveness { // FIXME(#3266)--make liveness warnings lintable self.tcx.sess.span_warn( sp, fmt!("variable `%s` is assigned to, \ - but never used", name)); + but never used", *name)); } else { // FIXME(#3266)--make liveness warnings lintable self.tcx.sess.span_warn( - sp, fmt!("unused variable: `%s`", name)); + sp, fmt!("unused variable: `%s`", *name)); } } return true; @@ -1929,7 +1929,7 @@ impl @Liveness { // FIXME(#3266)--make liveness warnings lintable self.tcx.sess.span_warn( sp, - fmt!("value assigned to `%s` is never read", name)); + fmt!("value assigned to `%s` is never read", *name)); } } } diff --git a/src/rustc/middle/mem_categorization.rs b/src/rustc/middle/mem_categorization.rs index e6d27a7d09e..c4b0e0ffd00 100644 --- a/src/rustc/middle/mem_categorization.rs +++ b/src/rustc/middle/mem_categorization.rs @@ -838,8 +838,8 @@ impl &mem_categorization_ctxt { }; for subpats.each |subpat| { - let subcmt = self.cat_variant(subpat, enum_did, cmt); - self.cat_pattern(subcmt, subpat, op); + let subcmt = self.cat_variant(*subpat, enum_did, cmt); + self.cat_pattern(subcmt, *subpat, op); } } @@ -870,8 +870,8 @@ impl &mem_categorization_ctxt { ast::pat_tup(subpats) => { // (p1, ..., pN) for subpats.each |subpat| { - let subcmt = self.cat_tuple_elt(subpat, cmt); - self.cat_pattern(subcmt, subpat, op); + let subcmt = self.cat_tuple_elt(*subpat, cmt); + self.cat_pattern(subcmt, *subpat, op); } } diff --git a/src/rustc/middle/region.rs b/src/rustc/middle/region.rs index 5c6e55fe422..b0916af376c 100644 --- a/src/rustc/middle/region.rs +++ b/src/rustc/middle/region.rs @@ -195,8 +195,8 @@ fn parent_id(cx: ctxt, span: span) -> ast::node_id { /// Records the current parent (if any) as the parent of `child_id`. 
fn record_parent(cx: ctxt, child_id: ast::node_id) { for cx.parent.each |parent_id| { - debug!("parent of node %d is node %d", child_id, parent_id); - cx.region_map.insert(child_id, parent_id); + debug!("parent of node %d is node %d", child_id, *parent_id); + cx.region_map.insert(child_id, *parent_id); } } @@ -700,7 +700,7 @@ fn determine_rp_in_ty(ty: @ast::ty, // type parameters are---for now, anyway---always invariant do cx.with_ambient_variance(rv_invariant) { for path.types.each |tp| { - visitor.visit_ty(tp, cx, visitor); + visitor.visit_ty(*tp, cx, visitor); } } } diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs index e2aa963750a..24806197baf 100644 --- a/src/rustc/middle/resolve.rs +++ b/src/rustc/middle/resolve.rs @@ -484,7 +484,7 @@ pure fn is_none<T>(x: Option<T>) -> bool { fn unused_import_lint_level(session: session) -> level { for session.opts.lint_opts.each |lint_option_pair| { - let (lint_type, lint_level) = lint_option_pair; + let (lint_type, lint_level) = *lint_option_pair; if lint_type == unused_imports { return lint_level; } @@ -1040,7 +1040,7 @@ impl Resolver { sp); for enum_definition.variants.each |variant| { - self.build_reduced_graph_for_variant(variant, + self.build_reduced_graph_for_variant(*variant, local_def(item.id), new_parent, visitor); @@ -1097,7 +1097,7 @@ impl Resolver { // Add the names of all the methods to the trait info. let method_names = @atom_hashmap(); for methods.each |method| { - let ty_m = trait_method_to_ty_method(method); + let ty_m = trait_method_to_ty_method(*method); let atom = ty_m.ident; // Add it to the trait info if not static, @@ -1166,7 +1166,7 @@ impl Resolver { def_ty(local_def(variant.node.id)), variant.span); for enum_definition.variants.each |variant| { - self.build_reduced_graph_for_variant(variant, item_id, + self.build_reduced_graph_for_variant(*variant, item_id, parent, visitor); } } @@ -1203,7 +1203,7 @@ impl Resolver { view_path_glob(module_ident_path, _) | view_path_list(module_ident_path, _, _) => { for module_ident_path.idents.each |ident| { - (*module_path).push(ident); + (*module_path).push(*ident); } } } @@ -1466,7 +1466,7 @@ impl Resolver { Some(method_names) => { let interned_method_names = @atom_hashmap(); for method_names.each |method_data| { - let (method_name, self_ty) = method_data; + let (method_name, self_ty) = *method_data; debug!("(building reduced graph for \ external crate) ... adding \ trait method '%s'", @@ -1530,7 +1530,7 @@ impl Resolver { let mut current_module = root; for pieces.each |ident_str| { - let ident = self.session.ident_of(ident_str); + let ident = self.session.ident_of(*ident_str); // Create or reuse a graph node for the child. let (child_name_bindings, new_parent) = self.add_child(ident, @@ -1542,7 +1542,7 @@ impl Resolver { match child_name_bindings.module_def { NoModuleDef => { debug!("(building reduced graph for external crate) \ - autovivifying %s", ident_str); + autovivifying %s", *ident_str); let parent_link = self.get_parent_link(new_parent, ident); (*child_name_bindings).define_module(parent_link, @@ -1667,7 +1667,7 @@ impl Resolver { self.resolve_imports_for_module(module_); for module_.children.each |_name, child_node| { - match (*child_node).get_module_if_available() { + match child_node.get_module_if_available() { None => { // Nothing to do. } @@ -1724,7 +1724,7 @@ impl Resolver { } else { result += ~"::"; } - result += self.session.str_of(atom); + result += self.session.str_of(*atom); } // XXX: Shouldn't copy here. We need string builder functionality. 
return result; @@ -2757,7 +2757,7 @@ impl Resolver { // Descend into children and anonymous children. for module_.children.each |_name, child_node| { - match (*child_node).get_module_if_available() { + match child_node.get_module_if_available() { None => { // Continue. } @@ -2809,7 +2809,7 @@ impl Resolver { self.record_exports_for_module(module_); for module_.children.each |_atom, child_name_bindings| { - match (*child_name_bindings).get_module_if_available() { + match child_name_bindings.get_module_if_available() { None => { // Nothing to do. } @@ -2830,9 +2830,9 @@ impl Resolver { let mut exports = ~[]; for self.namespaces.each |namespace| { match self.resolve_definition_of_name_in_module(module_, - name, - namespace, - Xray) { + name, + *namespace, + Xray) { NoNameDefinition => { // Nothing to do. } @@ -3158,7 +3158,7 @@ impl Resolver { // // XXX: Do we need a node ID here? - match method { + match *method { required(ty_m) => { do self.with_type_parameter_rib (HasTypeParameters(&ty_m.tps, @@ -3222,12 +3222,12 @@ impl Resolver { OpaqueFunctionRibKind)) || { - visit_foreign_item(foreign_item, (), + visit_foreign_item(*foreign_item, (), visitor); } } foreign_item_const(_) => { - visit_foreign_item(foreign_item, (), + visit_foreign_item(*foreign_item, (), visitor); } } @@ -3338,9 +3338,9 @@ impl Resolver { // Resolve each captured item. for (*capture_clause).each |capture_item| { match self.resolve_identifier(capture_item.name, - ValueNS, - true, - capture_item.span) { + ValueNS, + true, + capture_item.span) { None => { self.session.span_err(capture_item.span, ~"unresolved name in \ @@ -3422,8 +3422,8 @@ impl Resolver { visitor: ResolveVisitor) { for type_parameters.each |type_parameter| { - for (*type_parameter.bounds).each |bound| { - match bound { + for type_parameter.bounds.each |bound| { + match *bound { bound_copy | bound_send | bound_const | bound_owned => { // Nothing to do. } @@ -3482,7 +3482,7 @@ impl Resolver { // Resolve methods. for methods.each |method| { self.resolve_method(MethodRibKind(id, Provided(method.id)), - method, + *method, outer_type_parameter_count, visitor); } @@ -3605,7 +3605,7 @@ impl Resolver { // We also need a new scope for the method-specific // type parameters. self.resolve_method(MethodRibKind(id, Provided(method.id)), - method, + *method, outer_type_parameter_count, visitor); /* @@ -3718,7 +3718,7 @@ impl Resolver { let bindings_list = atom_hashmap(); for arm.pats.each |pattern| { - self.resolve_pattern(pattern, RefutableMode, Immutable, + self.resolve_pattern(*pattern, RefutableMode, Immutable, Some(bindings_list), visitor); } @@ -3939,7 +3939,7 @@ impl Resolver { // Check the types in the path pattern. for path.types.each |ty| { - self.resolve_type(ty, visitor); + self.resolve_type(*ty, visitor); } } @@ -3964,7 +3964,7 @@ impl Resolver { // Check the types in the path pattern. for path.types.each |ty| { - self.resolve_type(ty, visitor); + self.resolve_type(*ty, visitor); } } @@ -4057,7 +4057,7 @@ impl Resolver { // First, resolve the types. 
for path.types.each |ty| { - self.resolve_type(ty, visitor); + self.resolve_type(*ty, visitor); } if path.global { @@ -4583,7 +4583,7 @@ impl Resolver { Some(trait_def_ids) => { for trait_def_ids.each |trait_def_id| { self.add_trait_info_if_containing_method - (found_traits, trait_def_id, name); + (found_traits, *trait_def_id, name); } } None => { diff --git a/src/rustc/middle/trans/alt.rs b/src/rustc/middle/trans/alt.rs index dddcc70f232..67f20f12409 100644 --- a/src/rustc/middle/trans/alt.rs +++ b/src/rustc/middle/trans/alt.rs @@ -797,7 +797,7 @@ fn compile_guard(bcx: block, // Revoke the temp cleanups now that the guard successfully executed. for temp_cleanups.each |llval| { - revoke_clean(bcx, llval); + revoke_clean(bcx, *llval); } return do with_cond(bcx, Not(bcx, val)) |bcx| { @@ -1158,11 +1158,11 @@ fn trans_alt_inner(scope_cx: block, // is just to reduce code space. See extensive comment at the start // of the file for more details. if arm_data.arm.guard.is_none() { - bcx = store_non_ref_bindings(bcx, arm_data, None); + bcx = store_non_ref_bindings(bcx, *arm_data, None); } // insert bindings into the lllocals map and add cleanups - bcx = insert_lllocals(bcx, arm_data, true); + bcx = insert_lllocals(bcx, *arm_data, true); bcx = controlflow::trans_block(bcx, arm_data.arm.body, dest); bcx = trans_block_cleanups(bcx, block_cleanups(arm_data.bodycx)); @@ -1208,7 +1208,7 @@ fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef, } for inner.each |inner_pat| { - bcx = bind_irrefutable_pat(bcx, inner_pat, val, true); + bcx = bind_irrefutable_pat(bcx, *inner_pat, val, true); } } ast::pat_enum(_, sub_pats) => { diff --git a/src/rustc/middle/trans/common.rs b/src/rustc/middle/trans/common.rs index ee05a3bb4de..70e02a5aaf9 100644 --- a/src/rustc/middle/trans/common.rs +++ b/src/rustc/middle/trans/common.rs @@ -409,9 +409,9 @@ fn revoke_clean(cx: block, val: ValueRef) { }); for cleanup_pos.each |i| { info.cleanups = - vec::append(vec::slice(info.cleanups, 0u, i), + vec::append(vec::slice(info.cleanups, 0u, *i), vec::view(info.cleanups, - i + 1u, + *i + 1u, info.cleanups.len())); scope_clean_changed(info); } diff --git a/src/rustc/middle/trans/debuginfo.rs b/src/rustc/middle/trans/debuginfo.rs index 641f0024820..40530a54bd8 100644 --- a/src/rustc/middle/trans/debuginfo.rs +++ b/src/rustc/middle/trans/debuginfo.rs @@ -152,7 +152,7 @@ fn cached_metadata<T: Copy>(cache: metadata_cache, mdtag: int, if cache.contains_key(mdtag) { let items = cache.get(mdtag); for items.each |item| { - let md: T = md_from_metadata::<T>(item); + let md: T = md_from_metadata::<T>(*item); if eq(md) { return option::Some(md); } diff --git a/src/rustc/middle/trans/expr.rs b/src/rustc/middle/trans/expr.rs index c5b31fe5492..ea2aa7c98e1 100644 --- a/src/rustc/middle/trans/expr.rs +++ b/src/rustc/middle/trans/expr.rs @@ -687,7 +687,7 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock { // at the end of the scope with id `scope_id`: let root_key = {id:expr.id, derefs:0u}; for bcx.ccx().maps.root_map.find(root_key).each |scope_id| { - unrooted_datum.root(bcx, scope_id); + unrooted_datum.root(bcx, *scope_id); } return DatumBlock {bcx: bcx, datum: unrooted_datum}; @@ -996,7 +996,7 @@ fn trans_rec_or_struct(bcx: block, // functional record update) for base.each |base_expr| { let base_datum = unpack_datum!( - bcx, trans_to_datum(bcx, base_expr)); + bcx, trans_to_datum(bcx, *base_expr)); // Copy over inherited fields for field_tys.eachi |i, field_ty| { @@ -1017,7 +1017,9 @@ fn 
trans_rec_or_struct(bcx: block, // Now revoke the cleanups as we pass responsibility for the data // structure on to the caller - for temp_cleanups.each |cleanup| { revoke_clean(bcx, cleanup); } + for temp_cleanups.each |cleanup| { + revoke_clean(bcx, *cleanup); + } bcx } } diff --git a/src/rustc/middle/trans/meth.rs b/src/rustc/middle/trans/meth.rs index d7a689c5c92..1c973620ba2 100644 --- a/src/rustc/middle/trans/meth.rs +++ b/src/rustc/middle/trans/meth.rs @@ -104,7 +104,7 @@ fn trans_self_arg(bcx: block, base: @ast::expr, // FIXME(#3446)---this is wrong, actually. The temp_cleanups // should be revoked only after all arguments have been passed. for temp_cleanups.each |c| { - revoke_clean(bcx, c) + revoke_clean(bcx, *c) } return result; diff --git a/src/rustc/middle/trans/monomorphize.rs b/src/rustc/middle/trans/monomorphize.rs index fe5b1115ed8..f78a7452f4a 100644 --- a/src/rustc/middle/trans/monomorphize.rs +++ b/src/rustc/middle/trans/monomorphize.rs @@ -28,8 +28,8 @@ fn monomorphic_fn(ccx: @crate_ctxt, } }); - for real_substs.each() |s| { assert !ty::type_has_params(s); } - for substs.each() |s| { assert !ty::type_has_params(s); } + for real_substs.each() |s| { assert !ty::type_has_params(*s); } + for substs.each() |s| { assert !ty::type_has_params(*s); } let param_uses = type_use::type_uses_for(ccx, fn_id, substs.len()); let hash_id = make_mono_id(ccx, fn_id, substs, vtables, Some(param_uses)); if vec::any(hash_id.params, diff --git a/src/rustc/middle/trans/reachable.rs b/src/rustc/middle/trans/reachable.rs index f1a4a1ff5d1..d7d7af3ed8c 100644 --- a/src/rustc/middle/trans/reachable.rs +++ b/src/rustc/middle/trans/reachable.rs @@ -166,7 +166,9 @@ fn traverse_ty(ty: @ty, cx: ctx, v: visit::vt<ctx>) { Some(d) => traverse_def_id(cx, def_id_of_def(d)), None => { /* do nothing -- but should we fail here? */ } } - for p.types.each |t| { v.visit_ty(t, cx, v); }; + for p.types.each |t| { + v.visit_ty(*t, cx, v); + } } _ => visit::visit_ty(ty, cx, v) } diff --git a/src/rustc/middle/trans/shape.rs b/src/rustc/middle/trans/shape.rs index fcbe0427af0..bed945815d4 100644 --- a/src/rustc/middle/trans/shape.rs +++ b/src/rustc/middle/trans/shape.rs @@ -46,7 +46,7 @@ impl nominal_id_ : to_bytes::IterBytes { pure fn iter_bytes(lsb0: bool, f: to_bytes::Cb) { to_bytes::iter_bytes_2(&self.did, &self.parent_id, lsb0, f); for self.tps.each |t| { - ty::type_id(t).iter_bytes(lsb0, f); + ty::type_id(*t).iter_bytes(lsb0, f); } } } @@ -415,7 +415,7 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { let mut inf_sz = 0u16; for enum_variants.each |variants| { - let num_variants = vec::len(*variants) as u16; + let num_variants = vec::len(**variants) as u16; add_u16(header, header_sz + inf_sz); inf_sz += 2u16 * (num_variants + 2u16) + 3u16; } @@ -427,31 +427,29 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { let mut lv_table = ~[]; let mut i = 0u; for enum_variants.each |variants| { - add_u16(inf, vec::len(*variants) as u16); + add_u16(inf, vec::len(**variants) as u16); // Construct the largest-variants table. add_u16(inf, header_sz + inf_sz + data_sz + (vec::len(lv_table) as u16)); - let lv = largest_variants(ccx, variants); + let lv = largest_variants(ccx, *variants); add_u16(lv_table, vec::len(lv) as u16); for vec::each(lv) |v| { add_u16(lv_table, *v as u16); } // Determine whether the enum has dynamic size. 
- assert !vec::any(*variants, |v| { - vec::any(v.args, |t| ty::type_has_params(t)) - }); + assert !variants.any(|v| v.args.any(|t| ty::type_has_params(t))); // If we can, write in the static size and alignment of the enum. // Otherwise, write a placeholder. - let size_align = compute_static_enum_size(ccx, lv, variants); + let size_align = compute_static_enum_size(ccx, lv, *variants); // Write in the static size and alignment of the enum. add_u16(inf, size_align.size); inf += ~[size_align.align]; // Now write in the offset of each variant. - for vec::each(*variants) |_v| { + for variants.each |_v| { add_u16(inf, header_sz + inf_sz + offsets[i]); i += 1u; } @@ -584,7 +582,7 @@ fn gen_resource_shapes(ccx: @crate_ctxt) -> ValueRef { let len = ccx.shape_cx.resources.len(); for uint::range(0u, len) |i| { let ri = ccx.shape_cx.resources.get(i); - for ri.tps.each() |s| { assert !ty::type_has_params(s); } + for ri.tps.each() |s| { assert !ty::type_has_params(*s); } do option::iter(ri.parent_id) |id| { dtors += ~[trans::base::get_res_dtor(ccx, ri.did, id, ri.tps)]; } @@ -613,7 +611,7 @@ fn force_declare_tydescs(ccx: @crate_ctxt) { let len = ccx.shape_cx.resources.len(); for uint::range(0u, len) |i| { let ri = ccx.shape_cx.resources.get(i); - for ri.tps.each() |s| { assert !ty::type_has_params(s); } + for ri.tps.each() |s| { assert !ty::type_has_params(*s); } do option::iter(ri.parent_id) |id| { trans::base::get_res_dtor(ccx, ri.did, id, ri.tps); } diff --git a/src/rustc/middle/trans/tvec.rs b/src/rustc/middle/trans/tvec.rs index 8829257c33d..31eeedae104 100644 --- a/src/rustc/middle/trans/tvec.rs +++ b/src/rustc/middle/trans/tvec.rs @@ -319,7 +319,7 @@ fn write_content(bcx: block, match dest { Ignore => { for elements.each |element| { - bcx = expr::trans_into(bcx, element, Ignore); + bcx = expr::trans_into(bcx, *element, Ignore); } } diff --git a/src/rustc/middle/tstate/annotate.rs b/src/rustc/middle/tstate/annotate.rs index a8dbbcab2b8..000a93f7667 100644 --- a/src/rustc/middle/tstate/annotate.rs +++ b/src/rustc/middle/tstate/annotate.rs @@ -41,7 +41,7 @@ fn node_ids_in_fn(tcx: ty::ctxt, body: blk, rs: @mut ~[node_id]) { fn init_vecs(ccx: crate_ctxt, node_ids: ~[node_id], len: uint) { for node_ids.each |i| { log(debug, int::str(i) + ~" |-> " + uint::str(len)); - add_node(ccx, i, empty_ann(len)); + add_node(ccx, *i, empty_ann(len)); } } diff --git a/src/rustc/middle/ty.rs b/src/rustc/middle/ty.rs index 5197bd9a161..8f9f8fb03ad 100644 --- a/src/rustc/middle/ty.rs +++ b/src/rustc/middle/ty.rs @@ -900,7 +900,7 @@ fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option<ast::def_id>) -> t { } fn sflags(substs: &substs) -> uint { let mut f = 0u; - for substs.tps.each |tt| { f |= get(tt).flags; } + for substs.tps.each |tt| { f |= get(*tt).flags; } substs.self_r.iter(|r| f |= rflags(r)); return f; } @@ -931,7 +931,7 @@ fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option<ast::def_id>) -> t { flags |= get(m.ty).flags; } ty_rec(flds) => for flds.each |f| { flags |= get(f.mt.ty).flags; }, - ty_tup(ts) => for ts.each |tt| { flags |= get(tt).flags; }, + ty_tup(ts) => for ts.each |tt| { flags |= get(*tt).flags; }, ty_fn(ref f) => { match f.meta.proto { ty::proto_vstore(vstore_slice(r)) => flags |= rflags(r), @@ -1135,12 +1135,12 @@ fn maybe_walk_ty(ty: t, f: fn(t) -> bool) { } ty_enum(_, substs) | ty_class(_, substs) | ty_trait(_, substs, _) => { - for substs.tps.each |subty| { maybe_walk_ty(subty, f); } + for substs.tps.each |subty| { maybe_walk_ty(*subty, f); } } ty_rec(fields) => { for fields.each |fl| { 
maybe_walk_ty(fl.mt.ty, f); } } - ty_tup(ts) => { for ts.each |tt| { maybe_walk_ty(tt, f); } } + ty_tup(ts) => { for ts.each |tt| { maybe_walk_ty(*tt, f); } } ty_fn(ref ft) => { for ft.sig.inputs.each |a| { maybe_walk_ty(a.ty, f); } maybe_walk_ty(ft.sig.output, f); @@ -1621,7 +1621,7 @@ fn type_needs_drop(cx: ctxt, ty: t) -> bool { } } ty_tup(elts) => { - for elts.each |m| { if type_needs_drop(cx, m) { accum = true; } } + for elts.each |m| { if type_needs_drop(cx, *m) { accum = true; } } accum } ty_enum(did, ref substs) => { @@ -1629,7 +1629,7 @@ fn type_needs_drop(cx: ctxt, ty: t) -> bool { for vec::each(*variants) |variant| { for variant.args.each |aty| { // Perform any type parameter substitutions. - let arg_ty = subst(cx, substs, aty); + let arg_ty = subst(cx, substs, *aty); if type_needs_drop(cx, arg_ty) { accum = true; } } if accum { break; } @@ -1692,7 +1692,7 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t, ty_enum(did, ref substs) => { for vec::each(*enum_variants(cx, did)) |v| { for v.args.each |aty| { - let t = subst(cx, substs, aty); + let t = subst(cx, substs, *aty); needs_unwind_cleanup |= type_needs_unwind_cleanup_(cx, t, tycache, encountered_box); @@ -2040,7 +2040,7 @@ fn type_kind(cx: ctxt, ty: t) -> kind { // Tuples lower to the lowest of their members. ty_tup(tys) => { let mut lowest = kind_top(); - for tys.each |ty| { lowest = lower_kind(lowest, type_kind(cx, ty)); } + for tys.each |ty| { lowest = lower_kind(lowest, type_kind(cx, *ty)); } lowest } @@ -2054,7 +2054,7 @@ fn type_kind(cx: ctxt, ty: t) -> kind { for vec::each(*variants) |variant| { for variant.args.each |aty| { // Perform any type parameter substitutions. - let arg_ty = subst(cx, substs, aty); + let arg_ty = subst(cx, substs, *aty); lowest = lower_kind(lowest, type_kind(cx, arg_ty)); if lowest == kind_noncopyable() { break; } } @@ -2273,7 +2273,7 @@ fn type_structurally_contains(cx: ctxt, ty: t, test: fn(x: &sty) -> bool) -> ty_enum(did, ref substs) => { for vec::each(*enum_variants(cx, did)) |variant| { for variant.args.each |aty| { - let sty = subst(cx, substs, aty); + let sty = subst(cx, substs, *aty); if type_structurally_contains(cx, sty, test) { return true; } } } @@ -2297,7 +2297,7 @@ fn type_structurally_contains(cx: ctxt, ty: t, test: fn(x: &sty) -> bool) -> ty_tup(ts) => { for ts.each |tt| { - if type_structurally_contains(cx, tt, test) { return true; } + if type_structurally_contains(cx, *tt, test) { return true; } } return false; } @@ -2374,7 +2374,7 @@ fn type_is_pod(cx: ctxt, ty: t) -> bool { } } ty_tup(elts) => { - for elts.each |elt| { if !type_is_pod(cx, elt) { result = false; } } + for elts.each |elt| { if !type_is_pod(cx, *elt) { result = false; } } } ty_estr(vstore_fixed(_)) => result = true, ty_evec(mt, vstore_fixed(_)) | ty_unboxed_vec(mt) => { @@ -3667,9 +3667,9 @@ fn lookup_class_method_by_name(cx:ctxt, did: ast::def_id, name: ident, if is_local(did) { let ms = lookup_class_method_ids(cx, did); for ms.each |m| { - if m.name == name { - return ast_util::local_def(m.id); - } + if m.name == name { + return ast_util::local_def(m.id); + } } cx.sess.span_fatal(sp, fmt!("Class doesn't have a method \ named %s", cx.sess.str_of(name))); diff --git a/src/rustc/middle/typeck/check.rs b/src/rustc/middle/typeck/check.rs index 5a078210276..b7cdbbb8571 100644 --- a/src/rustc/middle/typeck/check.rs +++ b/src/rustc/middle/typeck/check.rs @@ -428,7 +428,7 @@ fn check_no_duplicate_fields(tcx: ty::ctxt, fields: let field_names = HashMap(); for fields.each |p| { - let (id, sp) = p; + let (id, sp) = *p; 
match field_names.find(id) { Some(orig_sp) => { tcx.sess.span_err(sp, fmt!("Duplicate field \ @@ -477,7 +477,7 @@ fn check_struct(ccx: @crate_ctxt, struct_def: @ast::struct_def, // typecheck the methods for struct_def.methods.each |m| { - check_method(ccx, m, self_ty, local_def(id)); + check_method(ccx, *m, self_ty, local_def(id)); } // Check that there's at least one field if struct_def.fields.len() < 1u { @@ -507,12 +507,12 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) { ccx.tcx.sess.str_of(it.ident), it.id, rp); let self_ty = ccx.to_ty(rscope::type_rscope(rp), ty); for ms.each |m| { - check_method(ccx, m, self_ty, local_def(it.id)); + check_method(ccx, *m, self_ty, local_def(it.id)); } } ast::item_trait(_, _, trait_methods) => { for trait_methods.each |trait_method| { - match trait_method { + match *trait_method { required(*) => { // Nothing to do, since required methods don't have // bodies to check. @@ -542,7 +542,7 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) { if syntax::attr::foreign_abi(it.attrs) == either::Right(ast::foreign_abi_rust_intrinsic) { for m.items.each |item| { - check_intrinsic_type(ccx, item); + check_intrinsic_type(ccx, *item); } } else { for m.items.each |item| { @@ -868,7 +868,7 @@ fn check_expr(fcx: @fn_ctxt, expr: @ast::expr, expected: Option<ty::t>) -> bool { return do check_expr_with_unifier(fcx, expr, expected) { for expected.each |t| { - demand::suptype(fcx, expr.span, t, fcx.expr_ty(expr)); + demand::suptype(fcx, expr.span, *t, fcx.expr_ty(expr)); } }; } @@ -1024,6 +1024,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, // of arguments when we typecheck the functions. This isn't really the // right way to do this. for [false, true]/_.each |check_blocks| { + let check_blocks = *check_blocks; debug!("check_blocks=%b", check_blocks); // More awful hacks: before we check the blocks, try to do @@ -1442,7 +1443,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, ast::expr_vec(args, mutbl) => { let tt = ast_expr_vstore_to_vstore(fcx, ev, vec::len(args), vst); let t: ty::t = fcx.infcx().next_ty_var(); - for args.each |e| { bot |= check_expr_with(fcx, e, t); } + for args.each |e| { bot |= check_expr_with(fcx, *e, t); } ty::mk_evec(tcx, {ty: t, mutbl: mutbl}, tt) } ast::expr_repeat(element, count_expr, mutbl) => { @@ -1813,7 +1814,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, } ast::expr_vec(args, mutbl) => { let t: ty::t = fcx.infcx().next_ty_var(); - for args.each |e| { bot |= check_expr_with(fcx, e, t); } + for args.each |e| { bot |= check_expr_with(fcx, *e, t); } let typ = ty::mk_evec(tcx, {ty: t, mutbl: mutbl}, ty::vstore_fixed(args.len())); fcx.write_ty(id, typ); @@ -2138,7 +2139,7 @@ fn check_stmt(fcx: @fn_ctxt, stmt: @ast::stmt) -> bool { node_id = id; match decl.node { ast::decl_local(ls) => for ls.each |l| { - bot |= check_decl_local(fcx, l); + bot |= check_decl_local(fcx, *l); }, ast::decl_item(_) => {/* ignore for now */ } } @@ -2186,7 +2187,7 @@ fn check_block(fcx0: @fn_ctxt, blk: ast::blk) -> bool { fcx.ccx.tcx.sess.span_warn(s.span, ~"unreachable statement"); warned = true; } - bot |= check_stmt(fcx, s); + bot |= check_stmt(fcx, *s); } match blk.node.expr { None => fcx.write_nil(blk.node.id), diff --git a/src/rustc/middle/typeck/check/alt.rs b/src/rustc/middle/typeck/check/alt.rs index cd454c54b2c..77d1ce4f774 100644 --- a/src/rustc/middle/typeck/check/alt.rs +++ b/src/rustc/middle/typeck/check/alt.rs @@ -23,7 +23,7 @@ fn check_alt(fcx: @fn_ctxt, block_region: ty::re_scope(arm.body.node.id) }; - for arm.pats.each |p| { check_pat(pcx, p, 
pattern_ty);} + for arm.pats.each |p| { check_pat(pcx, *p, pattern_ty);} check_legality_of_move_bindings(fcx, is_lvalue, arm.guard.is_some(), @@ -58,7 +58,7 @@ fn check_legality_of_move_bindings(fcx: @fn_ctxt, let mut by_ref = None; let mut any_by_move = false; for pats.each |pat| { - do pat_util::pat_bindings(def_map, pat) |bm, _id, span, _path| { + do pat_util::pat_bindings(def_map, *pat) |bm, _id, span, _path| { match bm { ast::bind_by_ref(_) | ast::bind_by_implicit_ref => { by_ref = Some(span); @@ -73,7 +73,7 @@ fn check_legality_of_move_bindings(fcx: @fn_ctxt, if !any_by_move { return; } // pointless micro-optimization for pats.each |pat| { - do walk_pat(pat) |p| { + do walk_pat(*pat) |p| { if !pat_is_variant(def_map, p) { match p.node { ast::pat_ident(ast::bind_by_move, _, sub) => { @@ -411,7 +411,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) { } let mut i = 0u; for elts.each |elt| { - check_pat(pcx, elt, ex_elts[i]); + check_pat(pcx, *elt, ex_elts[i]); i += 1u; } diff --git a/src/rustc/middle/typeck/check/method.rs b/src/rustc/middle/typeck/check/method.rs index edb157a6059..815101a1448 100644 --- a/src/rustc/middle/typeck/check/method.rs +++ b/src/rustc/middle/typeck/check/method.rs @@ -272,11 +272,11 @@ impl LookupContext { for applicable_traits.each |trait_did| { let coherence_info = self.fcx.ccx.coherence_info; let opt_impl_infos = - coherence_info.extension_methods.find(trait_did); + coherence_info.extension_methods.find(*trait_did); for opt_impl_infos.each |impl_infos| { for impl_infos.each |impl_info| { self.push_candidates_from_impl( - &self.extension_candidates, impl_info); + &self.extension_candidates, *impl_info); } } } @@ -414,7 +414,7 @@ impl LookupContext { for opt_impl_infos.each |impl_infos| { for impl_infos.each |impl_info| { self.push_candidates_from_impl( - &self.inherent_candidates, impl_info); + &self.inherent_candidates, *impl_info); } } } @@ -633,7 +633,7 @@ impl LookupContext { let region = self.infcx().next_region_var(self.expr.span, self.expr.id); for mutbls.each |mutbl| { - let autoref_ty = mk_autoref_ty(mutbl, region); + let autoref_ty = mk_autoref_ty(*mutbl, region); match self.search_for_method(autoref_ty) { None => {} Some(move mme) => { @@ -642,7 +642,7 @@ impl LookupContext { @{autoderefs: autoderefs, autoref: Some({kind: kind, region: region, - mutbl: mutbl})}); + mutbl: *mutbl})}); return Some(mme); } } diff --git a/src/rustc/middle/typeck/check/regionck.rs b/src/rustc/middle/typeck/check/regionck.rs index d77bc7c3dda..d8ea330ee11 100644 --- a/src/rustc/middle/typeck/check/regionck.rs +++ b/src/rustc/middle/typeck/check/regionck.rs @@ -199,7 +199,7 @@ fn visit_expr(expr: @ast::expr, &&rcx: @rcx, v: rvt) { } for args.each |arg| { - constrain_auto_ref(rcx, arg); + constrain_auto_ref(rcx, *arg); } } diff --git a/src/rustc/middle/typeck/check/regionmanip.rs b/src/rustc/middle/typeck/check/regionmanip.rs index 288baec27b5..64065a5bd35 100644 --- a/src/rustc/middle/typeck/check/regionmanip.rs +++ b/src/rustc/middle/typeck/check/regionmanip.rs @@ -33,7 +33,7 @@ fn replace_bound_regions_in_fn_ty( } - for self_ty.each |t| { vec::push(all_tys, t) } + for self_ty.each |t| { vec::push(all_tys, *t) } debug!("replace_bound_regions_in_fn_ty(self_info.self_ty=%?, fn_ty=%s, \ all_tys=%?)", diff --git a/src/rustc/middle/typeck/check/vtable.rs b/src/rustc/middle/typeck/check/vtable.rs index 08564ff4151..d48f5b9c070 100644 --- a/src/rustc/middle/typeck/check/vtable.rs +++ b/src/rustc/middle/typeck/check/vtable.rs @@ -51,7 +51,7 @@ fn 
lookup_vtables(fcx: @fn_ctxt, match *bound { ty::bound_trait(i_ty) => { let i_ty = ty::subst(tcx, substs, i_ty); - vec::push(result, lookup_vtable(fcx, expr, ty, i_ty, + vec::push(result, lookup_vtable(fcx, expr, *ty, i_ty, allow_unsafe, is_early)); } _ => () diff --git a/src/rustc/middle/typeck/check/writeback.rs b/src/rustc/middle/typeck/check/writeback.rs index d7d6647e5bd..a70e5f600d3 100644 --- a/src/rustc/middle/typeck/check/writeback.rs +++ b/src/rustc/middle/typeck/check/writeback.rs @@ -36,7 +36,7 @@ fn resolve_method_map_entry(fcx: @fn_ctxt, sp: span, id: ast::node_id) for resolve_type_vars_in_type(fcx, sp, mme.self_arg.ty).each |t| { fcx.ccx.method_map.insert( id, - {self_arg: {mode: mme.self_arg.mode, ty: t}, + {self_arg: {mode: mme.self_arg.mode, ty: *t}, ..*mme}); } } @@ -93,7 +93,7 @@ fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id) Some(substs) => { let mut new_tps = ~[]; for substs.tps.each |subst| { - match resolve_type_vars_in_type(fcx, sp, subst) { + match resolve_type_vars_in_type(fcx, sp, *subst) { Some(t) => vec::push(new_tps, t), None => { wbcx.success = false; return None; } } diff --git a/src/rustc/middle/typeck/coherence.rs b/src/rustc/middle/typeck/coherence.rs index e4ffc6c17c3..17d5f81993d 100644 --- a/src/rustc/middle/typeck/coherence.rs +++ b/src/rustc/middle/typeck/coherence.rs @@ -178,7 +178,7 @@ impl CoherenceChecker { trait `%s` with id %d", sess.str_of(item.ident), item.id); - match trait_method { + match *trait_method { required(_) => { /* fall through */} provided(m) => { // For every provided method in the @@ -293,7 +293,7 @@ impl CoherenceChecker { for associated_traits.each |associated_trait| { let trait_did = - self.trait_ref_to_trait_def_id(associated_trait); + self.trait_ref_to_trait_def_id(*associated_trait); debug!("(checking implementation) adding impl for trait \ '%s', item '%s'", ast_map::node_id_to_str( @@ -547,7 +547,7 @@ impl CoherenceChecker { debug!( "(creating impl) adding provided method `%s` to impl", sess.str_of(provided_method.ident)); - push(methods, provided_method); + push(methods, *provided_method); } } @@ -560,7 +560,7 @@ impl CoherenceChecker { for ast_methods.each |ast_method| { push(methods, - method_to_MethodInfo(ast_method)); + method_to_MethodInfo(*ast_method)); } // For each trait that the impl implements, see what @@ -569,7 +569,8 @@ impl CoherenceChecker { // impl, use the provided definition in the trait. for trait_refs.each |trait_ref| { - let trait_did = self.trait_ref_to_trait_def_id(trait_ref); + let trait_did = + self.trait_ref_to_trait_def_id(*trait_ref); match self.crate_context.provided_methods_map .find(trait_did.node) { @@ -694,9 +695,9 @@ impl CoherenceChecker { // Record all the trait methods. 
for associated_traits.each |trait_type| { - match get(trait_type).sty { + match get(*trait_type).sty { ty_trait(trait_id, _, _) => { - self.add_trait_method(trait_id, implementation); + self.add_trait_method(trait_id, *implementation); } _ => { self.crate_context.tcx.sess.bug(~"trait type \ @@ -718,7 +719,7 @@ impl CoherenceChecker { } Some(base_type_def_id) => { self.add_inherent_method(base_type_def_id, - implementation); + *implementation); self.base_type_def_ids.insert(implementation.did, base_type_def_id); diff --git a/src/rustc/middle/typeck/collect.rs b/src/rustc/middle/typeck/collect.rs index ef557ebfa0d..d984ca1a18a 100644 --- a/src/rustc/middle/typeck/collect.rs +++ b/src/rustc/middle/typeck/collect.rs @@ -453,7 +453,7 @@ fn convert(ccx: @crate_ctxt, it: @ast::item) { let cms = convert_methods(ccx, ms, rp, i_bounds); for trait_ref.each |t| { - check_methods_against_trait(ccx, tps, rp, selfty, t, cms); + check_methods_against_trait(ccx, tps, rp, selfty, *t, cms); } } ast::item_trait(tps, _, trait_methods) => { @@ -469,7 +469,7 @@ fn convert(ccx: @crate_ctxt, it: @ast::item) { // FIXME (#2616): something like this, when we start having // trait inheritance? // for trait_ref.each |t| { - // check_methods_against_trait(ccx, tps, rp, selfty, t, cms); + // check_methods_against_trait(ccx, tps, rp, selfty, *t, cms); // } } ast::item_class(struct_def, tps) => { @@ -539,13 +539,13 @@ fn convert_struct(ccx: @crate_ctxt, // Write the type of each of the members for struct_def.fields.each |f| { - convert_field(ccx, rp, tpt.bounds, f); + convert_field(ccx, rp, tpt.bounds, *f); } let {bounds, substs} = mk_substs(ccx, tps, rp); let selfty = ty::mk_class(tcx, local_def(id), substs); let cms = convert_methods(ccx, struct_def.methods, rp, bounds); for struct_def.traits.each |trait_ref| { - check_methods_against_trait(ccx, tps, rp, selfty, trait_ref, cms); + check_methods_against_trait(ccx, tps, rp, selfty, *trait_ref, cms); // trait_ref.impl_id represents (class, trait) pair write_ty_to_tcx(tcx, trait_ref.impl_id, tpt.ty); tcx.tcache.insert(local_def(trait_ref.impl_id), tpt); diff --git a/src/rustc/util/ppaux.rs b/src/rustc/util/ppaux.rs index e67aba71eae..17af1fd565b 100644 --- a/src/rustc/util/ppaux.rs +++ b/src/rustc/util/ppaux.rs @@ -286,7 +286,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { } s += ~"("; let mut strs = ~[]; - for inputs.each |a| { vec::push(strs, fn_input_to_str(cx, a)); } + for inputs.each |a| { vec::push(strs, fn_input_to_str(cx, *a)); } s += str::connect(strs, ~", "); s += ~")"; if ty::get(output).sty != ty_nil { @@ -311,7 +311,8 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { // if there is an id, print that instead of the structural type: for ty::type_def_id(typ).each |def_id| { // note that this typedef cannot have type parameters - return ast_map::path_to_str(ty::item_path(cx, def_id),cx.sess.intr()); + return ast_map::path_to_str(ty::item_path(cx, *def_id), + cx.sess.intr()); } // pretty print the structural type representation: @@ -341,12 +342,12 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { ty_type => ~"type", ty_rec(elems) => { let mut strs: ~[~str] = ~[]; - for elems.each |fld| { vec::push(strs, field_to_str(cx, fld)); } + for elems.each |fld| { vec::push(strs, field_to_str(cx, *fld)); } ~"{" + str::connect(strs, ~",") + ~"}" } ty_tup(elems) => { let mut strs = ~[]; - for elems.each |elem| { vec::push(strs, ty_to_str(cx, elem)); } + for elems.each |elem| { vec::push(strs, ty_to_str(cx, *elem)); } ~"(" + str::connect(strs, ~",") + ~")" } ty_fn(ref f) => { diff --git 
a/src/rustdoc/markdown_pass.rs b/src/rustdoc/markdown_pass.rs index 67972de92e7..39971bc91e7 100644 --- a/src/rustdoc/markdown_pass.rs +++ b/src/rustdoc/markdown_pass.rs @@ -347,8 +347,8 @@ fn write_mod_contents( write_index(ctxt, option::get(doc.index)); } - for doc.items.each |ItemTag| { - write_item(ctxt, ItemTag); + for doc.items.each |itemTag| { + write_item(ctxt, *itemTag); } } @@ -451,8 +451,8 @@ fn write_nmod(ctxt: Ctxt, doc: doc::NmodDoc) { } for doc.fns.each |FnDoc| { - write_item_header(ctxt, doc::FnTag(FnDoc)); - write_fn(ctxt, FnDoc); + write_item_header(ctxt, doc::FnTag(*FnDoc)); + write_fn(ctxt, *FnDoc); } } diff --git a/src/rustdoc/sectionalize_pass.rs b/src/rustdoc/sectionalize_pass.rs index 608f61fa48b..2a011889536 100644 --- a/src/rustdoc/sectionalize_pass.rs +++ b/src/rustdoc/sectionalize_pass.rs @@ -95,7 +95,7 @@ fn sectionalize(desc: Option<~str>) -> (Option<~str>, ~[doc::Section]) { let mut sections = ~[]; for lines.each |line| { - match parse_header(line) { + match parse_header(*line) { Some(header) => { if option::is_some(current_section) { sections += ~[option::get(current_section)]; @@ -109,17 +109,17 @@ fn sectionalize(desc: Option<~str>) -> (Option<~str>, ~[doc::Section]) { match copy current_section { Some(section) => { current_section = Some({ - body: section.body + ~"\n" + line, + body: section.body + ~"\n" + *line, .. section }); } None => { new_desc = match new_desc { Some(desc) => { - Some(desc + ~"\n" + line) + Some(desc + ~"\n" + *line) } None => { - Some(line) + Some(*line) } }; } diff --git a/src/test/bench/graph500-bfs.rs b/src/test/bench/graph500-bfs.rs index 15ffbfc182a..5e49a5488e9 100644 --- a/src/test/bench/graph500-bfs.rs +++ b/src/test/bench/graph500-bfs.rs @@ -123,9 +123,9 @@ fn bfs(graph: graph, key: node_id) -> bfs_result { let t = Q.pop_front(); do graph[t].each() |k| { - if marks[k] == -1i64 { - marks[k] = t; - Q.add_back(k); + if marks[*k] == -1i64 { + marks[*k] = t; + Q.add_back(*k); } true }; @@ -183,8 +183,8 @@ fn bfs2(graph: graph, key: node_id) -> bfs_result { let mut color = white; do neighbors.each() |k| { - if is_gray(colors[k]) { - color = gray(k); + if is_gray(colors[*k]) { + color = gray(*k); false } else { true } @@ -264,8 +264,8 @@ fn pbfs(&&graph: arc::ARC<graph>, key: node_id) -> bfs_result { let mut color = white; do neighbors.each() |k| { - if is_gray(colors[k]) { - color = gray(k); + if is_gray(colors[*k]) { + color = gray(*k); false } else { true } diff --git a/src/test/bench/msgsend-ring-mutex-arcs.rs b/src/test/bench/msgsend-ring-mutex-arcs.rs index 86ab3df63b8..ca76835dadd 100644 --- a/src/test/bench/msgsend-ring-mutex-arcs.rs +++ b/src/test/bench/msgsend-ring-mutex-arcs.rs @@ -99,7 +99,7 @@ fn main(args: ~[~str]) { thread_ring(0u, msg_per_task, option::unwrap(num_chan), num_port); // synchronize - for futures.each |f| { future::get(&f) }; + for futures.each |f| { future::get(f) }; let stop = time::precise_time_s(); diff --git a/src/test/bench/msgsend-ring-pipes.rs b/src/test/bench/msgsend-ring-pipes.rs index 4e873b0127b..9fa30af4bde 100644 --- a/src/test/bench/msgsend-ring-pipes.rs +++ b/src/test/bench/msgsend-ring-pipes.rs @@ -96,7 +96,7 @@ fn main(args: ~[~str]) { thread_ring(0u, msg_per_task, option::unwrap(num_chan), num_port); // synchronize - for futures.each |f| { future::get(&f) }; + for futures.each |f| { future::get(f) }; let stop = time::precise_time_s(); diff --git a/src/test/bench/msgsend-ring-rw-arcs.rs b/src/test/bench/msgsend-ring-rw-arcs.rs index d59a8dc47e8..77e5d7c8b67 100644 --- 
a/src/test/bench/msgsend-ring-rw-arcs.rs
+++ b/src/test/bench/msgsend-ring-rw-arcs.rs
@@ -100,7 +100,7 @@ fn main(args: ~[~str]) {
     thread_ring(0u, msg_per_task, option::unwrap(num_chan), num_port);

     // synchronize
-    for futures.each |f| { future::get(&f) };
+    for futures.each |f| { future::get(f) };

     let stop = time::precise_time_s();

diff --git a/src/test/bench/shootout-k-nucleotide-pipes.rs b/src/test/bench/shootout-k-nucleotide-pipes.rs
index 78225a0d68a..bcdd77d015a 100644
--- a/src/test/bench/shootout-k-nucleotide-pipes.rs
+++ b/src/test/bench/shootout-k-nucleotide-pipes.rs
@@ -46,14 +46,15 @@ fn sort_and_fmt(mm: HashMap<~[u8], uint>, total: uint) -> ~str {
     });

     let pairs_sorted = sortKV(pairs);
-
+
     let mut buffer = ~"";

-    pairs_sorted.each(fn&(kv: (~[u8], float)) -> bool unsafe {
-        let (k,v) = kv;
-        buffer += (fmt!("%s %0.3f\n", str::to_upper(str::raw::from_bytes(k)), v));
-        return true;
-    });
+    for pairs_sorted.each |kv| {
+        let (k,v) = *kv;
+        unsafe {
+            buffer += (fmt!("%s %0.3f\n", str::to_upper(str::raw::from_bytes(k)), v));
+        }
+    }

     return buffer;
 }
diff --git a/src/test/bench/shootout-k-nucleotide.rs b/src/test/bench/shootout-k-nucleotide.rs
index dae261502ce..bc9d068094e 100644
--- a/src/test/bench/shootout-k-nucleotide.rs
+++ b/src/test/bench/shootout-k-nucleotide.rs
@@ -43,14 +43,15 @@ fn sort_and_fmt(mm: HashMap<~[u8], uint>, total: uint) -> ~str {
     });

     let pairs_sorted = sortKV(pairs);
-
+
     let mut buffer = ~"";

-    pairs_sorted.each(fn&(kv: (~[u8], float)) -> bool unsafe {
-        let (k,v) = kv;
-        buffer += (fmt!("%s %0.3f\n", str::to_upper(str::raw::from_bytes(k)), v));
-        return true;
-    });
+    for pairs_sorted.each |kv| {
+        let (k,v) = *kv;
+        unsafe {
+            buffer += (fmt!("%s %0.3f\n", str::to_upper(str::raw::from_bytes(k)), v));
+        }
+    }

     return buffer;
 }
diff --git a/src/test/bench/shootout-pfib.rs b/src/test/bench/shootout-pfib.rs
index 57f64760745..9740e5adc1d 100644
--- a/src/test/bench/shootout-pfib.rs
+++ b/src/test/bench/shootout-pfib.rs
@@ -78,7 +78,7 @@ fn stress(num_tasks: int) {
             stress_task(i);
         }
     }
-    for results.each |r| { future::get(&r); }
+    for results.each |r| { future::get(r); }
 }

 fn main(args: ~[~str]) {
diff --git a/src/test/bench/task-perf-word-count-generic.rs b/src/test/bench/task-perf-word-count-generic.rs
index 606a5aa53b9..b7d6e4769a0 100644
--- a/src/test/bench/task-perf-word-count-generic.rs
+++ b/src/test/bench/task-perf-word-count-generic.rs
@@ -153,7 +153,8 @@ mod map_reduce {
         for inputs.each |i| {
             let (ctrl, ctrl_server) = ctrl_proto::init();
             let ctrl = box(ctrl);
-            vec::push(tasks, spawn_joinable(|| map_task(map, ctrl, i) ));
+            let i = copy *i;
+            vec::push(tasks, spawn_joinable(|move i| map_task(map, ctrl, i)));
             vec::push(ctrls, ctrl_server);
         }
         return tasks;
@@ -283,7 +284,7 @@ mod map_reduce {
         for reducers.each_value |v| { send(v, done) }

-        for tasks.each |t| { join(t); }
+        for tasks.each |t| { join(*t); }
     }
 }
diff --git a/src/test/compile-fail/issue-2149.rs b/src/test/compile-fail/issue-2149.rs
index 3917e73a9ae..eb8da1519e8 100644
--- a/src/test/compile-fail/issue-2149.rs
+++ b/src/test/compile-fail/issue-2149.rs
@@ -5,7 +5,7 @@ trait vec_monad<A> {
 impl<A> ~[A]: vec_monad<A> {
     fn bind<B>(f: fn(A) -> ~[B]) {
         let mut r = fail;
-        for self.each |elt| { r += f(elt); }
+        for self.each |elt| { r += f(*elt); }
         //~^ WARNING unreachable expression
         //~^^ ERROR the type of this value must be known
     }
diff --git a/src/test/run-pass/argv.rs b/src/test/run-pass/argv.rs
index 79d7e4e5b0f..0d067eb0e15 100644
--- a/src/test/run-pass/argv.rs
+++ b/src/test/run-pass/argv.rs
@@ -1,5 +1,5 @@
 fn main(args: ~[~str]) {
     let vs: ~[~str] = ~[~"hi", ~"there", ~"this", ~"is", ~"a", ~"vec"];
     let vvs: ~[~[~str]] = ~[args, vs];
-    for vvs.each |vs| { for vs.each |s| { log(debug, s); } }
+    for vvs.each |vs| { for vs.each |s| { log(debug, *s); } }
 }
diff --git a/src/test/run-pass/block-iter-1.rs b/src/test/run-pass/block-iter-1.rs
index 692f9ea6ca4..74a81d5b803 100644
--- a/src/test/run-pass/block-iter-1.rs
+++ b/src/test/run-pass/block-iter-1.rs
@@ -1,7 +1,7 @@
 // xfail-fast
 #[legacy_modes];

-fn iter_vec<T>(v: ~[T], f: fn(T)) { for v.each |x| { f(x); } }
+fn iter_vec<T>(v: ~[T], f: fn(T)) { for v.each |x| { f(*x); } }

 fn main() {
     let v = ~[1, 2, 3, 4, 5, 6, 7];
diff --git a/src/test/run-pass/block-iter-2.rs b/src/test/run-pass/block-iter-2.rs
index 6f5021ea7b1..6389ef0f644 100644
--- a/src/test/run-pass/block-iter-2.rs
+++ b/src/test/run-pass/block-iter-2.rs
@@ -1,7 +1,7 @@
 // xfail-fast
 #[legacy_modes];

-fn iter_vec<T>(v: ~[T], f: fn(T)) { for v.each |x| { f(x); } }
+fn iter_vec<T>(v: ~[T], f: fn(T)) { for v.each |x| { f(*x); } }

 fn main() {
     let v = ~[1, 2, 3, 4, 5];
diff --git a/src/test/run-pass/borrowck-borrow-from-at-vec.rs b/src/test/run-pass/borrowck-borrow-from-at-vec.rs
index a79b581c562..a8013c1514c 100644
--- a/src/test/run-pass/borrowck-borrow-from-at-vec.rs
+++ b/src/test/run-pass/borrowck-borrow-from-at-vec.rs
@@ -1,6 +1,6 @@
 fn sum_slice(x: &[int]) -> int {
     let mut sum = 0;
-    for x.each |i| { sum += i; }
+    for x.each |i| { sum += *i; }
     return sum;
 }

diff --git a/src/test/run-pass/explicit-self-closures.rs b/src/test/run-pass/explicit-self-closures.rs
index 3fd9826a392..162c62803c3 100644
--- a/src/test/run-pass/explicit-self-closures.rs
+++ b/src/test/run-pass/explicit-self-closures.rs
@@ -6,13 +6,13 @@ struct Box {

 impl Box {
     fn set_many(&mut self, xs: &[uint]) {
-        for xs.each |x| { self.x = x; }
+        for xs.each |x| { self.x = *x; }
     }
     fn set_many2(@mut self, xs: &[uint]) {
-        for xs.each |x| { self.x = x; }
+        for xs.each |x| { self.x = *x; }
     }
     fn set_many3(~mut self, xs: &[uint]) {
-        for xs.each |x| { self.x = x; }
+        for xs.each |x| { self.x = *x; }
     }
 }
diff --git a/src/test/run-pass/hashmap-memory.rs b/src/test/run-pass/hashmap-memory.rs
index 5d89cee1083..9e0727471e2 100644
--- a/src/test/run-pass/hashmap-memory.rs
+++ b/src/test/run-pass/hashmap-memory.rs
@@ -30,7 +30,8 @@ mod map_reduce {
     fn start_mappers(ctrl: Chan<ctrl_proto>, inputs: ~[~str]) {
         for inputs.each |i| {
-            task::spawn(|| map_task(ctrl, i) );
+            let i = *i;
+            task::spawn(|move i| map_task(ctrl, i) );
         }
     }
diff --git a/src/test/run-pass/linear-for-loop.rs b/src/test/run-pass/linear-for-loop.rs
index 5091020508d..ccb2c25b58d 100644
--- a/src/test/run-pass/linear-for-loop.rs
+++ b/src/test/run-pass/linear-for-loop.rs
@@ -3,7 +3,7 @@
 fn main() {
     let x = ~[1, 2, 3];
     let mut y = 0;
-    for x.each |i| { log(debug, i); y += i; }
+    for x.each |i| { log(debug, *i); y += *i; }
     log(debug, y);
     assert (y == 6);
     let s = ~"hello there";
diff --git a/src/test/run-pass/loop-scope.rs b/src/test/run-pass/loop-scope.rs
index 9dc53e726d6..6ffbfa21ba4 100644
--- a/src/test/run-pass/loop-scope.rs
+++ b/src/test/run-pass/loop-scope.rs
@@ -1,6 +1,6 @@
 fn main() {
     let x = ~[10, 20, 30];
     let mut sum = 0;
-    for x.each |x| { sum += x; }
+    for x.each |x| { sum += *x; }
     assert (sum == 60);
 }
diff --git a/src/test/run-pass/main-ivec.rs b/src/test/run-pass/main-ivec.rs
index 2cd454a1ccf..0c2251a2419 100644
--- a/src/test/run-pass/main-ivec.rs
+++ b/src/test/run-pass/main-ivec.rs
@@ -1 +1 @@
-fn main(args: ~[~str]) { for args.each |s| { log(debug, s); } }
+fn main(args: ~[~str]) { for args.each |s| { log(debug, *s); } }
diff --git a/src/test/run-pass/monad.rs b/src/test/run-pass/monad.rs
index 131ae43def2..ddfc29fc8d8 100644
--- a/src/test/run-pass/monad.rs
+++ b/src/test/run-pass/monad.rs
@@ -8,7 +8,7 @@ trait vec_monad<A> {
 impl<A> ~[A]: vec_monad<A> {
     fn bind<B: Copy>(f: fn(A) -> ~[B]) -> ~[B] {
         let mut r = ~[];
-        for self.each |elt| { r += f(elt); }
+        for self.each |elt| { r += f(*elt); }
         r
     }
 }
diff --git a/src/test/run-pass/morestack6.rs b/src/test/run-pass/morestack6.rs
index da4cfd0b471..deca22de643 100644
--- a/src/test/run-pass/morestack6.rs
+++ b/src/test/run-pass/morestack6.rs
@@ -52,8 +52,9 @@ fn main() {
     ];
     let rng = rand::Rng();
     for fns.each |f| {
+        let f = *f;
         let sz = rng.next() % 256u32 + 256u32;
         let frame_backoff = rng.next() % 10u32 + 1u32;
-        task::try(|| runtest(f, frame_backoff) );
+        task::try(|move f| runtest(f, frame_backoff) );
     }
 }
diff --git a/src/test/run-pass/reflect-visit-data.rs b/src/test/run-pass/reflect-visit-data.rs
index 7a6217a7c2e..affd0ef6162 100644
--- a/src/test/run-pass/reflect-visit-data.rs
+++ b/src/test/run-pass/reflect-visit-data.rs
@@ -624,7 +624,7 @@ fn main() {
     visit_tydesc(td, v);

     for (copy u.vals).each |s| {
-        io::println(fmt!("val: %s", s));
+        io::println(fmt!("val: %s", *s));
     }
     error!("%?", copy u.vals);
     assert u.vals == ~[~"1", ~"2", ~"3", ~"true", ~"false", ~"5", ~"4", ~"3"];
diff --git a/src/test/run-pass/static-impl.rs b/src/test/run-pass/static-impl.rs
index 60965f801fb..750d13b87e2 100644
--- a/src/test/run-pass/static-impl.rs
+++ b/src/test/run-pass/static-impl.rs
@@ -36,10 +36,10 @@ trait vec_utils<T> {

 impl<T> ~[T]: vec_utils<T> {
     fn length_() -> uint { vec::len(self) }
-    fn iter_(f: fn(T)) { for self.each |x| { f(x); } }
+    fn iter_(f: fn(T)) { for self.each |x| { f(*x); } }
     fn map_<U: Copy>(f: fn(T) -> U) -> ~[U] {
         let mut r = ~[];
-        for self.each |elt| { r += ~[f(elt)]; }
+        for self.each |elt| { r += ~[f(*elt)]; }
         r
     }
 }
diff --git a/src/test/run-pass/static-method-test.rs b/src/test/run-pass/static-method-test.rs
index ef9e08797d5..2619cc05586 100644
--- a/src/test/run-pass/static-method-test.rs
+++ b/src/test/run-pass/static-method-test.rs
@@ -55,7 +55,7 @@ fn map<T, IT: BaseIter<T>, U, BU: buildable<U>>
     (v: IT, f: fn(T) -> U) -> BU {
     do build |push| {
         for v.each() |elem| {
-            push(f(elem));
+            push(f(*elem));
         }
     }
 }
diff --git a/src/test/run-pass/task-comm-3.rs b/src/test/run-pass/task-comm-3.rs
index c4ed0302b35..dd2dd5e54d4 100644
--- a/src/test/run-pass/task-comm-3.rs
+++ b/src/test/run-pass/task-comm-3.rs
@@ -32,7 +32,7 @@ fn test00() {
     // Create and spawn tasks...
     let mut results = ~[];
     while i < number_of_tasks {
-        let ch = po.chan();
+        let ch = po.chan();
         do task::task().future_result(|+r| {
             vec::push(results, r);
         }).spawn |copy i| {
@@ -53,7 +53,7 @@ fn test00() {
     }

     // Join spawned tasks...
-    for results.each |r| { future::get(&r); }
+    for results.each |r| { future::get(r); }

     debug!("Completed: Final number is: ");
     log(error, sum);
diff --git a/src/test/run-pass/task-comm.rs b/src/test/run-pass/task-comm.rs
index 4b6ea2c1603..a3a6b6efd7f 100644
--- a/src/test/run-pass/task-comm.rs
+++ b/src/test/run-pass/task-comm.rs
@@ -51,7 +51,7 @@ fn test00() {
         while i < number_of_messages { sum += recv(po); i = i + 1; }
     }

-    for results.each |r| { future::get(&r); }
+    for results.each |r| { future::get(r); }

     debug!("Completed: Final number is: ");
     assert (sum ==
@@ -134,7 +134,7 @@ fn test06() {
     }


-    for results.each |r| { future::get(&r); }
+    for results.each |r| { future::get(r); }

 }
diff --git a/src/test/run-pass/trait-generic.rs b/src/test/run-pass/trait-generic.rs
index 9b8e752d808..83391d80e26 100644
--- a/src/test/run-pass/trait-generic.rs
+++ b/src/test/run-pass/trait-generic.rs
@@ -20,7 +20,7 @@ trait map<T> {
 impl<T> ~[T]: map<T> {
     fn map<U: Copy>(f: fn(T) -> U) -> ~[U] {
         let mut r = ~[];
-        for self.each |x| { r += ~[f(x)]; }
+        for self.each |x| { r += ~[f(*x)]; }
         r
     }
 }
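Every hunk above applies the same call-site rewrite: each() now passes each element to its closure by reference, so loop bodies read through the pointer with *, and values captured into a spawned closure are first copied or moved out (let i = copy *i; ... |move i| ...). A minimal sketch of the new calling convention, in the pre-1.0 syntax these test files use; the function and variable names below are illustrative only, not taken from any file in this patch:

    fn sum_all(v: &[int]) -> int {
        let mut total = 0;
        // each() hands the closure a reference, so the body dereferences with *x
        for v.each |x| { total += *x; }
        return total;
    }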
