| field | value | date |
|---|---|---|
| author | Patrick Walton <pcwalton@mimiga.net> | 2012-12-04 10:50:00 -0800 |
| committer | Patrick Walton <pcwalton@mimiga.net> | 2012-12-04 14:19:19 -0800 |
| commit | 56ece46f7de9d1703dd39f952afac9bed22633b6 (patch) | |
| tree | ede7f9ee4631628cc8603e692a0a3c1c5a00577e | |
| parent | 94be14516968501306f1ed95774a3f227956e809 (diff) | |
librustc: Remove all legacy pattern bindings from libsyntax and librustc. rs=refactoring
87 files changed, 937 insertions, 923 deletions
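
The change applied throughout the diff is mechanical: match arms that bound the matched value directly (`Some(v) => ... v ...`) are rewritten to bind by reference (`Some(ref v) => ... (*v) ...`), making the borrow explicit instead of relying on the legacy implicit binding mode. The sketch below illustrates the same idea in present-day Rust syntax rather than the 2012 dialect used in the diff (`~str`, `@`-boxes, etc.); the `crate_name` variable and its value are made up for illustration.

```rust
// Minimal sketch of the binding change, in modern Rust rather than the
// 2012 dialect shown in the diff. `crate_name` is a made-up example value.
fn main() {
    let crate_name: Option<String> = Some(String::from("rustc"));

    // Legacy style bound the payload by value: `Some(v) => ...`.
    // This commit rewrites such arms to borrow explicitly with `ref`,
    // then dereferences at each use site: `Some(ref v) => ... (*v) ...`.
    match crate_name {
        Some(ref v) => println!("name attribute: {}", *v), // `v: &String`
        None => println!("no name attribute"),
    }

    // Because the arm only borrowed the payload, `crate_name` is not moved
    // and remains usable after the match.
    assert!(crate_name.is_some());
}
```
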
diff --git a/src/librustc/back/link.rs b/src/librustc/back/link.rs index f4ac2dee300..52b94b76fe6 100644 --- a/src/librustc/back/link.rs +++ b/src/librustc/back/link.rs @@ -412,12 +412,12 @@ fn build_link_meta(sess: Session, c: ast::crate, output: &Path, for linkage_metas.each |meta| { if attr::get_meta_item_name(*meta) == ~"name" { match attr::get_meta_item_value_str(*meta) { - Some(v) => { name = Some(v); } + Some(ref v) => { name = Some((*v)); } None => cmh_items.push(*meta) } } else if attr::get_meta_item_name(*meta) == ~"vers" { match attr::get_meta_item_value_str(*meta) { - Some(v) => { vers = Some(v); } + Some(ref v) => { vers = Some((*v)); } None => cmh_items.push(*meta) } } else { cmh_items.push(*meta); } @@ -443,12 +443,12 @@ fn build_link_meta(sess: Session, c: ast::crate, output: &Path, symbol_hasher.reset(); for cmh_items.each |m| { match m.node { - ast::meta_name_value(key, value) => { - symbol_hasher.write_str(len_and_str(key)); + ast::meta_name_value(ref key, value) => { + symbol_hasher.write_str(len_and_str((*key))); symbol_hasher.write_str(len_and_str_lit(value)); } - ast::meta_word(name) => { - symbol_hasher.write_str(len_and_str(name)); + ast::meta_word(ref name) => { + symbol_hasher.write_str(len_and_str((*name))); } ast::meta_list(_, _) => { // FIXME (#607): Implement this @@ -473,13 +473,13 @@ fn build_link_meta(sess: Session, c: ast::crate, output: &Path, fn crate_meta_name(sess: Session, _crate: ast::crate, output: &Path, metas: provided_metas) -> ~str { return match metas.name { - Some(v) => v, + Some(ref v) => (*v), None => { let name = match output.filestem() { None => sess.fatal(fmt!("output file name `%s` doesn't\ appear to have a stem", output.to_str())), - Some(s) => s + Some(ref s) => (*s) }; warn_missing(sess, ~"name", name); name @@ -490,7 +490,7 @@ fn build_link_meta(sess: Session, c: ast::crate, output: &Path, fn crate_meta_vers(sess: Session, _crate: ast::crate, metas: provided_metas) -> ~str { return match metas.vers { - Some(v) => v, + Some(ref v) => (*v), None => { let vers = ~"0.0"; warn_missing(sess, ~"vers", vers); @@ -534,7 +534,7 @@ fn symbol_hash(tcx: ty::ctxt, symbol_hasher: &hash::State, t: ty::t, fn get_symbol_hash(ccx: @crate_ctxt, t: ty::t) -> ~str { match ccx.type_hashcodes.find(t) { - Some(h) => return h, + Some(ref h) => return (*h), None => { let hash = symbol_hash(ccx.tcx, ccx.symbol_hasher, t, ccx.link_meta); ccx.type_hashcodes.insert(t, hash); diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs index 6ce9934b729..a131386ba16 100644 --- a/src/librustc/driver/driver.rs +++ b/src/librustc/driver/driver.rs @@ -39,7 +39,7 @@ fn anon_src() -> ~str { ~"<anon>" } fn source_name(input: input) -> ~str { match input { - file_input(ifile) => ifile.to_str(), + file_input(ref ifile) => (*ifile).to_str(), str_input(_) => anon_src() } } @@ -121,13 +121,13 @@ enum input { fn parse_input(sess: Session, cfg: ast::crate_cfg, input: input) -> @ast::crate { match input { - file_input(file) => { - parse::parse_crate_from_file(&file, cfg, sess.parse_sess) + file_input(ref file) => { + parse::parse_crate_from_file(&(*file), cfg, sess.parse_sess) } - str_input(src) => { + str_input(ref src) => { // FIXME (#2319): Don't really want to box the source string parse::parse_crate_from_source_str( - anon_src(), @src, cfg, sess.parse_sess) + anon_src(), @(*src), cfg, sess.parse_sess) } } } @@ -337,10 +337,10 @@ fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: input, pp::space(s.s); pprust::synth_comment(s, int::to_str(item.id, 
10u)); } - pprust::node_block(s, blk) => { + pprust::node_block(s, ref blk) => { pp::space(s.s); pprust::synth_comment(s, - ~"block " + int::to_str(blk.node.id, 10u)); + ~"block " + int::to_str((*blk).node.id, 10u)); } pprust::node_expr(s, expr) => { pp::space(s.s); @@ -563,7 +563,7 @@ fn build_session_options(binary: ~str, let target = match target_opt { None => host_triple(), - Some(s) => s + Some(ref s) => (*s) }; let addl_lib_search_paths = @@ -743,15 +743,15 @@ fn build_output_filenames(input: input, // have to make up a name // We want to toss everything after the final '.' let dirpath = match *odir { - Some(d) => d, + Some(ref d) => (*d), None => match input { str_input(_) => os::getcwd(), - file_input(ifile) => ifile.dir_path() + file_input(ref ifile) => (*ifile).dir_path() } }; let stem = match input { - file_input(ifile) => ifile.filestem().get(), + file_input(ref ifile) => (*ifile).filestem().get(), str_input(_) => ~"rust_out" }; @@ -764,12 +764,12 @@ fn build_output_filenames(input: input, } } - Some(out_file) => { - out_path = out_file; + Some(ref out_file) => { + out_path = (*out_file); obj_path = if stop_after_codegen { - out_file + (*out_file) } else { - out_file.with_filetype(obj_suffix) + (*out_file).with_filetype(obj_suffix) }; if sess.building_library { diff --git a/src/librustc/lib/llvm.rs b/src/librustc/lib/llvm.rs index 2fdc15b330b..3f24d280437 100644 --- a/src/librustc/lib/llvm.rs +++ b/src/librustc/lib/llvm.rs @@ -1086,7 +1086,7 @@ fn type_to_str(names: type_names, ty: TypeRef) -> ~str { fn type_to_str_inner(names: type_names, outer0: ~[TypeRef], ty: TypeRef) -> ~str { match type_has_name(names, ty) { - option::Some(n) => return n, + option::Some(ref n) => return (*n), _ => {} } diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 5a1b49b1a4c..9c734f8e698 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -125,7 +125,7 @@ fn visit_item(e: env, i: @ast::item) { if abi != ast::foreign_abi_cdecl && abi != ast::foreign_abi_stdcall { return; } } - either::Left(msg) => e.diag.span_fatal(i.span, msg) + either::Left(ref msg) => e.diag.span_fatal(i.span, (*msg)) } let cstore = e.cstore; @@ -137,13 +137,13 @@ fn visit_item(e: env, i: @ast::item) { let foreign_name = match attr::first_attr_value_str_by_name(i.attrs, ~"link_name") { - Some(nn) => { - if nn == ~"" { + Some(ref nn) => { + if (*nn) == ~"" { e.diag.span_fatal( i.span, ~"empty #[link_name] not allowed; use #[nolink]."); } - nn + (*nn) } None => *e.intr.get(i.ident) }; @@ -161,8 +161,8 @@ fn visit_item(e: env, i: @ast::item) { for link_args.each |a| { match attr::get_meta_item_value_str(attr::attr_meta(*a)) { - Some(linkarg) => { - cstore::add_used_link_args(cstore, linkarg); + Some(ref linkarg) => { + cstore::add_used_link_args(cstore, (*linkarg)); } None => {/* fallthrough */ } } @@ -236,7 +236,7 @@ fn resolve_crate(e: env, ident: ast::ident, metas: ~[@ast::meta_item], let cname = match attr::last_meta_item_value_str_by_name(metas, ~"name") { - option::Some(v) => v, + option::Some(ref v) => (*v), option::None => *e.intr.get(ident) }; let cmeta = @{name: cname, data: cdata, diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index 07636481de2..d1c71b95191 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -603,7 +603,7 @@ fn maybe_get_item_ast(intr: @ident_interner, cdata: cmd, tcx: ty::ctxt, let item_doc = lookup_item(id, cdata.data); let path = vec::init(item_path(intr, 
item_doc)); match decode_inlined_item(cdata, tcx, path, item_doc) { - Some(ii) => csearch::found(ii), + Some(ref ii) => csearch::found((*ii)), None => { match item_parent_item(item_doc) { Some(did) => { @@ -611,7 +611,7 @@ fn maybe_get_item_ast(intr: @ident_interner, cdata: cmd, tcx: ty::ctxt, let parent_item = lookup_item(did.node, cdata.data); match decode_inlined_item(cdata, tcx, path, parent_item) { - Some(ii) => csearch::found_parent(did, ii), + Some(ref ii) => csearch::found_parent(did, (*ii)), None => csearch::not_found } } @@ -635,7 +635,7 @@ fn get_enum_variants(intr: @ident_interner, cdata: cmd, id: ast::node_id, tcx, cdata); let name = item_name(intr, item); let arg_tys = match ty::get(ctor_ty).sty { - ty::ty_fn(f) => f.sig.inputs.map(|a| a.ty), + ty::ty_fn(ref f) => (*f).sig.inputs.map(|a| a.ty), // Nullary enum variant. _ => ~[], @@ -750,7 +750,7 @@ fn get_trait_methods(intr: @ident_interner, cdata: cmd, id: ast::node_id, let ty = doc_type(mth, tcx, cdata); let def_id = item_def_id(mth, cdata); let fty = match ty::get(ty).sty { - ty::ty_fn(f) => f, + ty::ty_fn(ref f) => (*f), _ => { tcx.diag.handler().bug( ~"get_trait_methods: id has non-function type"); @@ -781,7 +781,7 @@ fn get_provided_trait_methods(intr: @ident_interner, cdata: cmd, let fty; match ty::get(ty).sty { - ty::ty_fn(f) => fty = f, + ty::ty_fn(ref f) => fty = (*f), _ => { tcx.diag.handler().bug(~"get_provided_trait_methods(): id \ has non-function type"); @@ -1104,7 +1104,7 @@ fn get_crate_vers(data: @~[u8]) -> ~str { let attrs = decoder::get_crate_attributes(data); return match attr::last_meta_item_value_str_by_name( attr::find_linkage_metas(attrs), ~"vers") { - Some(ver) => ver, + Some(ref ver) => (*ver), None => ~"0.0" }; } diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index f9d485cf83d..f48cdea64d0 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -218,7 +218,7 @@ fn encode_type(ecx: @encode_ctxt, ebml_w: Writer::Serializer, typ: ty::t) { fn encode_symbol(ecx: @encode_ctxt, ebml_w: Writer::Serializer, id: node_id) { ebml_w.start_tag(tag_items_data_item_symbol); let sym = match ecx.item_symbols.find(id) { - Some(x) => x, + Some(ref x) => (*x), None => { ecx.diag.handler().bug( fmt!("encode_symbol: id not found %d", id)); @@ -341,9 +341,9 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: Writer::Serializer, // Encode the reexports of this module. 
debug!("(encoding info for module) encoding reexports for %d", id); match ecx.reexports2.find(id) { - Some(exports) => { + Some(ref exports) => { debug!("(encoding info for module) found reexports for %d", id); - for exports.each |exp| { + for (*exports).each |exp| { debug!("(encoding info for module) reexport '%s' for %d", exp.name, id); ebml_w.start_tag(tag_items_data_item_reexport); @@ -483,8 +483,8 @@ fn encode_info_for_ctor(ecx: @encode_ctxt, ebml_w: Writer::Serializer, encode_type(ecx, ebml_w, its_ty); encode_path(ecx, ebml_w, path, ast_map::path_name(ident)); match item { - Some(it) => { - (ecx.encode_inlined_item)(ecx, ebml_w, path, it); + Some(ref it) => { + (ecx.encode_inlined_item)(ecx, ebml_w, path, (*it)); } None => { encode_symbol(ecx, ebml_w, id); @@ -622,7 +622,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: Writer::Serializer, encode_region_param(ecx, ebml_w, item); ebml_w.end_tag(); } - item_enum(enum_definition, tps) => { + item_enum(ref enum_definition, tps) => { add_to_index(); do ebml_w.wr_tag(tag_items_data_item) { encode_def_id(ebml_w, local_def(item.id)); @@ -630,7 +630,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: Writer::Serializer, encode_type_param_bounds(ebml_w, ecx, tps); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); encode_name(ecx, ebml_w, item.ident); - for enum_definition.variants.each |v| { + for (*enum_definition).variants.each |v| { encode_variant_id(ebml_w, local_def(v.node.id)); } (ecx.encode_inlined_item)(ecx, ebml_w, path, ii_item(item)); @@ -638,7 +638,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: Writer::Serializer, encode_region_param(ecx, ebml_w, item); } encode_enum_variant_info(ecx, ebml_w, item.id, - enum_definition.variants, path, index, tps); + (*enum_definition).variants, path, index, tps); } item_class(struct_def, tps) => { /* First, encode the fields and methods @@ -764,7 +764,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: Writer::Serializer, vec::append(tps, m.tps)); } } - item_trait(tps, traits, ms) => { + item_trait(tps, traits, ref ms) => { let provided_methods = dvec::DVec(); add_to_index(); @@ -778,12 +778,12 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: Writer::Serializer, encode_attributes(ebml_w, item.attrs); let mut i = 0u; for vec::each(*ty::trait_methods(tcx, local_def(item.id))) |mty| { - match ms[i] { - required(ty_m) => { + match (*ms)[i] { + required(ref ty_m) => { ebml_w.start_tag(tag_item_trait_method); - encode_def_id(ebml_w, local_def(ty_m.id)); + encode_def_id(ebml_w, local_def((*ty_m).id)); encode_name(ecx, ebml_w, mty.ident); - encode_type_param_bounds(ebml_w, ecx, ty_m.tps); + encode_type_param_bounds(ebml_w, ecx, (*ty_m).tps); encode_type(ecx, ebml_w, ty::mk_fn(tcx, mty.fty)); encode_family(ebml_w, purity_fn_family(mty.fty.meta.purity)); encode_self_type(ebml_w, mty.self_ty); @@ -816,7 +816,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: Writer::Serializer, // method info, we output static methods with type signatures as // written. Here, we output the *real* type signatures. I feel like // maybe we should only ever handle the real type signatures. 
- for vec::each(ms) |m| { + for vec::each((*ms)) |m| { let ty_m = ast_util::trait_method_to_ty_method(*m); if ty_m.self_ty.node != ast::sty_static { loop; } @@ -971,19 +971,19 @@ fn write_int(writer: io::Writer, &&n: int) { fn encode_meta_item(ebml_w: Writer::Serializer, mi: meta_item) { match mi.node { - meta_word(name) => { + meta_word(ref name) => { ebml_w.start_tag(tag_meta_item_word); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(str::to_bytes(name)); + ebml_w.writer.write(str::to_bytes((*name))); ebml_w.end_tag(); ebml_w.end_tag(); } - meta_name_value(name, value) => { + meta_name_value(ref name, value) => { match value.node { lit_str(value) => { ebml_w.start_tag(tag_meta_item_name_value); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(str::to_bytes(name)); + ebml_w.writer.write(str::to_bytes((*name))); ebml_w.end_tag(); ebml_w.start_tag(tag_meta_item_value); ebml_w.writer.write(str::to_bytes(*value)); @@ -993,10 +993,10 @@ fn encode_meta_item(ebml_w: Writer::Serializer, mi: meta_item) { _ => {/* FIXME (#623): encode other variants */ } } } - meta_list(name, items) => { + meta_list(ref name, items) => { ebml_w.start_tag(tag_meta_item_list); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(str::to_bytes(name)); + ebml_w.writer.write(str::to_bytes((*name))); ebml_w.end_tag(); for items.each |inner_item| { encode_meta_item(ebml_w, **inner_item); diff --git a/src/librustc/metadata/filesearch.rs b/src/librustc/metadata/filesearch.rs index 5d03bbf4f6f..be4968636f1 100644 --- a/src/librustc/metadata/filesearch.rs +++ b/src/librustc/metadata/filesearch.rs @@ -53,11 +53,11 @@ fn mk_filesearch(maybe_sysroot: Option<Path>, make_target_lib_path(&self.sysroot, self.target_triple)); match get_cargo_lib_path_nearest() { - result::Ok(p) => paths.push(p), + result::Ok(ref p) => paths.push((*p)), result::Err(_) => () } match get_cargo_lib_path() { - result::Ok(p) => paths.push(p), + result::Ok(ref p) => paths.push((*p)), result::Err(_) => () } paths @@ -110,14 +110,14 @@ fn make_target_lib_path(sysroot: &Path, fn get_default_sysroot() -> Path { match os::self_exe_path() { - option::Some(p) => p.pop(), + option::Some(ref p) => (*p).pop(), option::None => fail ~"can't determine value for sysroot" } } fn get_sysroot(maybe_sysroot: Option<Path>) -> Path { match maybe_sysroot { - option::Some(sr) => sr, + option::Some(ref sr) => (*sr), option::None => get_default_sysroot() } } @@ -128,9 +128,9 @@ fn get_cargo_sysroot() -> Result<Path, ~str> { fn get_cargo_root() -> Result<Path, ~str> { match os::getenv(~"CARGO_ROOT") { - Some(_p) => result::Ok(Path(_p)), + Some(ref _p) => result::Ok(Path((*_p))), None => match os::homedir() { - Some(_q) => result::Ok(_q.push(".cargo")), + Some(ref _q) => result::Ok((*_q).push(".cargo")), None => result::Err(~"no CARGO_ROOT or home directory") } } diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 512bf454750..b1a4fcc24cd 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -50,7 +50,7 @@ type ctxt = { fn load_library_crate(cx: ctxt) -> {ident: ~str, data: @~[u8]} { match find_library_crate(cx) { - Some(t) => return t, + Some(ref t) => return (*t), None => { cx.diag.span_fatal( cx.span, fmt!("can't find crate for `%s`", @@ -135,7 +135,7 @@ fn crate_name_from_metas(metas: ~[@ast::meta_item]) -> ~str { match vec::last_opt(name_items) { Some(i) => { match attr::get_meta_item_value_str(i) { - Some(n) => n, + Some(ref n) => (*n), // FIXME (#2406): Probably want a warning here 
since the user // is using the wrong type of meta item. _ => fail diff --git a/src/librustc/metadata/tyencode.rs b/src/librustc/metadata/tyencode.rs index 9dbdf9fe493..a928d2d55bb 100644 --- a/src/librustc/metadata/tyencode.rs +++ b/src/librustc/metadata/tyencode.rs @@ -118,11 +118,11 @@ fn enc_mt(w: io::Writer, cx: @ctxt, mt: ty::mt) { } fn enc_opt<T>(w: io::Writer, t: Option<T>, enc_f: fn(T)) { - match t { - None => w.write_char('n'), - Some(v) => { + match &t { + &None => w.write_char('n'), + &Some(ref v) => { w.write_char('s'); - enc_f(v); + enc_f((*v)); } } } @@ -237,18 +237,18 @@ fn enc_sty(w: io::Writer, cx: @ctxt, st: ty::sty) { ty_f64 => w.write_str(&"MF"), } } - ty::ty_enum(def, substs) => { + ty::ty_enum(def, ref substs) => { w.write_str(&"t["); w.write_str((cx.ds)(def)); w.write_char('|'); - enc_substs(w, cx, substs); + enc_substs(w, cx, (*substs)); w.write_char(']'); } - ty::ty_trait(def, substs, vstore) => { + ty::ty_trait(def, ref substs, vstore) => { w.write_str(&"x["); w.write_str((cx.ds)(def)); w.write_char('|'); - enc_substs(w, cx, substs); + enc_substs(w, cx, (*substs)); enc_vstore(w, cx, vstore); w.write_char(']'); } @@ -284,8 +284,8 @@ fn enc_sty(w: io::Writer, cx: @ctxt, st: ty::sty) { } w.write_char(']'); } - ty::ty_fn(f) => { - enc_ty_fn(w, cx, f); + ty::ty_fn(ref f) => { + enc_ty_fn(w, cx, (*f)); } ty::ty_infer(ty::TyVar(id)) => { w.write_char('X'); @@ -316,7 +316,7 @@ fn enc_sty(w: io::Writer, cx: @ctxt, st: ty::sty) { enc_proto(w, p); } ty::ty_opaque_box => w.write_char('B'), - ty::ty_class(def, substs) => { + ty::ty_class(def, ref substs) => { debug!("~~~~ %s", ~"a["); w.write_str(&"a["); let s = (cx.ds)(def); @@ -324,7 +324,7 @@ fn enc_sty(w: io::Writer, cx: @ctxt, st: ty::sty) { w.write_str(s); debug!("~~~~ %s", ~"|"); w.write_char('|'); - enc_substs(w, cx, substs); + enc_substs(w, cx, (*substs)); debug!("~~~~ %s", ~"]"); w.write_char(']'); } diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index 49d3fdab437..87d7710a289 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -282,11 +282,11 @@ fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item { ast::ii_foreign(i) => { ast::ii_foreign(fld.fold_foreign_item(i)) } - ast::ii_dtor(dtor, nm, tps, parent_id) => { - let dtor_body = fld.fold_block(dtor.node.body); + ast::ii_dtor(ref dtor, nm, tps, parent_id) => { + let dtor_body = fld.fold_block((*dtor).node.body); ast::ii_dtor({node: {body: dtor_body, - .. dtor.node}, - .. dtor}, nm, tps, parent_id) + .. (*dtor).node}, + .. (*dtor)}, nm, tps, parent_id) } } } @@ -315,16 +315,16 @@ fn renumber_ast(xcx: extended_decode_ctxt, ii: ast::inlined_item) ast::ii_foreign(i) => { ast::ii_foreign(fld.fold_foreign_item(i)) } - ast::ii_dtor(dtor, nm, tps, parent_id) => { - let dtor_body = fld.fold_block(dtor.node.body); - let dtor_attrs = fld.fold_attributes(dtor.node.attrs); + ast::ii_dtor(ref dtor, nm, tps, parent_id) => { + let dtor_body = fld.fold_block((*dtor).node.body); + let dtor_attrs = fld.fold_attributes((*dtor).node.attrs); let new_params = fold::fold_ty_params(tps, fld); - let dtor_id = fld.new_id(dtor.node.id); + let dtor_id = fld.new_id((*dtor).node.id); let new_parent = xcx.tr_def_id(parent_id); - let new_self = fld.new_id(dtor.node.self_id); + let new_self = fld.new_id((*dtor).node.self_id); ast::ii_dtor({node: {id: dtor_id, attrs: dtor_attrs, self_id: new_self, body: dtor_body}, - .. dtor}, + .. 
(*dtor)}, nm, new_params, new_parent) } } @@ -497,8 +497,8 @@ impl method_origin: tr { typeck::method_static(did) => { typeck::method_static(did.tr(xcx)) } - typeck::method_param(mp) => { - typeck::method_param({trait_id:mp.trait_id.tr(xcx),.. mp}) + typeck::method_param(ref mp) => { + typeck::method_param({trait_id:(*mp).trait_id.tr(xcx),.. (*mp)}) } typeck::method_trait(did, m, vstore) => { typeck::method_trait(did.tr(xcx), m, vstore) diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs index 1a7c78dce14..7ff7c48f54a 100644 --- a/src/librustc/middle/borrowck/check_loans.rs +++ b/src/librustc/middle/borrowck/check_loans.rs @@ -51,9 +51,9 @@ impl purity_cause : cmp::Eq { _ => false } } - pc_cmt(e0a) => { + pc_cmt(ref e0a) => { match (*other) { - pc_cmt(e0b) => e0a == e0b, + pc_cmt(ref e0b) => (*e0a) == (*e0b), _ => false } } @@ -131,7 +131,7 @@ impl check_loan_ctxt { loop { match pure_map.find(scope_id) { None => (), - Some(e) => return Some(pc_cmt(e)) + Some(ref e) => return Some(pc_cmt((*e))) } match region_map.find(scope_id) { @@ -224,14 +224,14 @@ impl check_loan_ctxt { let callee_ty = ty::node_id_to_type(tcx, callee_id); match ty::get(callee_ty).sty { - ty::ty_fn(fn_ty) => { - match fn_ty.meta.purity { + ty::ty_fn(ref fn_ty) => { + match (*fn_ty).meta.purity { ast::pure_fn => return, // case (c) above ast::impure_fn | ast::unsafe_fn | ast::extern_fn => { self.report_purity_error( pc, callee_span, fmt!("access to %s function", - pprust::purity_to_str(fn_ty.meta.purity))); + pprust::purity_to_str((*fn_ty).meta.purity))); } } } @@ -369,7 +369,7 @@ impl check_loan_ctxt { // is not visible from the outside match self.purity(ex.id) { None => (), - Some(pc @ pc_cmt(_)) => { + Some(pc_cmt(_)) => { // Subtle: Issue #3162. If we are enforcing purity // because there is a reference to aliasable, mutable data // that we require to be immutable, we can't allow writes @@ -377,7 +377,9 @@ impl check_loan_ctxt { // because that aliasable data might have been located on // the current stack frame, we don't know. 
self.report_purity_error( - pc, ex.span, at.ing_form(self.bccx.cmt_to_str(cmt))); + self.purity(ex.id).get(), + ex.span, + at.ing_form(self.bccx.cmt_to_str(cmt))); } Some(pc_pure_fn) => { if cmt.lp.is_none() { @@ -446,13 +448,13 @@ impl check_loan_ctxt { sp, fmt!("%s prohibited in pure context", msg)); } - pc_cmt(e) => { - if self.reported.insert(e.cmt.id, ()) { + pc_cmt(ref e) => { + if self.reported.insert((*e).cmt.id, ()) { self.tcx().sess.span_err( - e.cmt.span, + (*e).cmt.span, fmt!("illegal borrow unless pure: %s", - self.bccx.bckerr_to_str(e))); - self.bccx.note_and_explain_bckerr(e); + self.bccx.bckerr_to_str((*e)))); + self.bccx.note_and_explain_bckerr((*e)); self.tcx().sess.span_note( sp, fmt!("impure due to %s", msg)); @@ -538,12 +540,12 @@ impl check_loan_ctxt { args: ~[@ast::expr]) { match self.purity(expr.id) { None => {} - Some(pc) => { + Some(ref pc) => { self.check_pure_callee_or_arg( - pc, callee, callee_id, callee_span); + (*pc), callee, callee_id, callee_span); for args.each |arg| { self.check_pure_callee_or_arg( - pc, Some(*arg), arg.id, arg.span); + (*pc), Some(*arg), arg.id, arg.span); } } } diff --git a/src/librustc/middle/borrowck/gather_loans.rs b/src/librustc/middle/borrowck/gather_loans.rs index 5383db78aec..3320414ee0f 100644 --- a/src/librustc/middle/borrowck/gather_loans.rs +++ b/src/librustc/middle/borrowck/gather_loans.rs @@ -150,8 +150,8 @@ fn req_loans_in_expr(ex: @ast::expr, } match self.bccx.method_map.find(ex.id) { - Some(method_map_entry) => { - match method_map_entry.explicit_self { + Some(ref method_map_entry) => { + match (*method_map_entry).explicit_self { ast::sty_by_ref => { let rcvr_cmt = self.bccx.cat_expr(rcvr); self.guarantee_valid(rcvr_cmt, m_imm, scope_r); @@ -167,9 +167,9 @@ fn req_loans_in_expr(ex: @ast::expr, visit::visit_expr(ex, self, vt); } - ast::expr_match(ex_v, arms) => { + ast::expr_match(ex_v, ref arms) => { let cmt = self.bccx.cat_expr(ex_v); - for arms.each |arm| { + for (*arms).each |arm| { for arm.pats.each |pat| { self.gather_pat(cmt, *pat, arm.body.node.id, ex.id); } @@ -228,19 +228,19 @@ fn req_loans_in_expr(ex: @ast::expr, } // see explanation attached to the `root_ub` field: - ast::expr_while(cond, body) => { + ast::expr_while(cond, ref body) => { // during the condition, can only root for the condition self.root_ub = cond.id; (vt.visit_expr)(cond, self, vt); // during body, can only root for the body - self.root_ub = body.node.id; - (vt.visit_block)(body, self, vt); + self.root_ub = (*body).node.id; + (vt.visit_block)((*body), self, vt); } // see explanation attached to the `root_ub` field: - ast::expr_loop(body, _) => { - self.root_ub = body.node.id; + ast::expr_loop(ref body, _) => { + self.root_ub = (*body).node.id; visit::visit_expr(ex, self, vt); } @@ -331,7 +331,7 @@ impl gather_loan_ctxt { // error will be reported. Some(_) => { match self.bccx.loan(cmt, scope_r, req_mutbl) { - Err(e) => { self.bccx.report(e); } + Err(ref e) => { self.bccx.report((*e)); } Ok(move loans) => { self.add_loans(cmt, req_mutbl, scope_r, move loans); } @@ -364,8 +364,8 @@ impl gather_loan_ctxt { // rooted. good. 
self.bccx.stable_paths += 1; } - Ok(pc_if_pure(e)) => { - debug!("result of preserve: %?", pc_if_pure(e)); + Ok(pc_if_pure(ref e)) => { + debug!("result of preserve: %?", pc_if_pure((*e))); // we are only able to guarantee the validity if // the scope is pure @@ -374,7 +374,7 @@ impl gather_loan_ctxt { // if the scope is some block/expr in the // fn, then just require that this scope // be pure - self.req_maps.pure_map.insert(pure_id, e); + self.req_maps.pure_map.insert(pure_id, (*e)); self.bccx.req_pure_paths += 1; debug!("requiring purity for scope %?", @@ -390,14 +390,14 @@ impl gather_loan_ctxt { // otherwise, we can't enforce purity for // that scope, so give up and report an // error - self.bccx.report(e); + self.bccx.report((*e)); } } } - Err(e) => { + Err(ref e) => { // we cannot guarantee the validity of this pointer debug!("result of preserve: error"); - self.bccx.report(e); + self.bccx.report((*e)); } } } diff --git a/src/librustc/middle/borrowck/loan.rs b/src/librustc/middle/borrowck/loan.rs index 649c3e0eefe..7dc68fa0b4b 100644 --- a/src/librustc/middle/borrowck/loan.rs +++ b/src/librustc/middle/borrowck/loan.rs @@ -25,7 +25,7 @@ impl borrowck_ctxt { loans: ~[] }; match lc.loan(cmt, mutbl) { - Err(e) => Err(e), + Err(ref e) => Err((*e)), Ok(()) => { let LoanContext {loans, _} = move lc; Ok(loans) diff --git a/src/librustc/middle/borrowck/mod.rs b/src/librustc/middle/borrowck/mod.rs index de7b7c37c3d..c5a02c978f9 100644 --- a/src/librustc/middle/borrowck/mod.rs +++ b/src/librustc/middle/borrowck/mod.rs @@ -500,7 +500,7 @@ impl borrowck_ctxt { fn report_if_err(bres: bckres<()>) { match bres { Ok(()) => (), - Err(e) => self.report(e) + Err(ref e) => self.report((*e)) } } diff --git a/src/librustc/middle/borrowck/preserve.rs b/src/librustc/middle/borrowck/preserve.rs index a8201cc9d11..1243b9baf5f 100644 --- a/src/librustc/middle/borrowck/preserve.rs +++ b/src/librustc/middle/borrowck/preserve.rs @@ -178,9 +178,9 @@ priv impl &preserve_ctxt { debug!("must root @T, otherwise purity req'd"); self.attempt_root(cmt, base, derefs) } - Err(e) => { + Err(ref e) => { debug!("must root @T, err: %s", - self.bccx.bckerr_to_str(e)); + self.bccx.bckerr_to_str((*e))); self.attempt_root(cmt, base, derefs) } } @@ -274,13 +274,13 @@ priv impl &preserve_ctxt { } // the base requires purity too, that's fine - Ok(pc_if_pure(e)) => { - Ok(pc_if_pure(e)) + Ok(pc_if_pure(ref e)) => { + Ok(pc_if_pure((*e))) } // base is not stable, doesn't matter - Err(e) => { - Err(e) + Err(ref e) => { + Err((*e)) } } } diff --git a/src/librustc/middle/check_alt.rs b/src/librustc/middle/check_alt.rs index 7ec20dddffe..bb50597dab7 100644 --- a/src/librustc/middle/check_alt.rs +++ b/src/librustc/middle/check_alt.rs @@ -35,13 +35,13 @@ fn check_crate(tcx: ty::ctxt, crate: @crate) { fn check_expr(tcx: ty::ctxt, ex: @expr, &&s: (), v: visit::vt<()>) { visit::visit_expr(ex, s, v); match ex.node { - expr_match(scrut, arms) => { - check_arms(tcx, arms); + expr_match(scrut, ref arms) => { + check_arms(tcx, (*arms)); /* Check for exhaustiveness */ // Check for empty enum, because is_useful only works on inhabited // types. 
let pat_ty = node_id_to_type(tcx, scrut.id); - if arms.is_empty() { + if (*arms).is_empty() { if !type_is_empty(tcx, pat_ty) { // We know the type is inhabited, so this must be wrong tcx.sess.span_err(ex.span, fmt!("non-exhaustive patterns: \ @@ -52,14 +52,14 @@ fn check_expr(tcx: ty::ctxt, ex: @expr, &&s: (), v: visit::vt<()>) { } match ty::get(pat_ty).sty { ty_enum(did, _) => { - if (*enum_variants(tcx, did)).is_empty() && arms.is_empty() { + if (*enum_variants(tcx, did)).is_empty() && (*arms).is_empty() { return; } } _ => { /* We assume only enum types can be uninhabited */ } } - let arms = vec::concat(vec::filter_map(arms, unguarded_pat)); + let arms = vec::concat(vec::filter_map((*arms), unguarded_pat)); check_exhaustive(tcx, ex.span, arms); } _ => () @@ -95,17 +95,17 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: ~[@pat]) { let ext = match is_useful(tcx, vec::map(pats, |p| ~[*p]), ~[wild()]) { not_useful => return, // This is good, wildcard pattern isn't reachable useful_ => None, - useful(ty, ctor) => { + useful(ty, ref ctor) => { match ty::get(ty).sty { ty::ty_bool => { - match ctor { + match (*ctor) { val(const_bool(true)) => Some(~"true"), val(const_bool(false)) => Some(~"false"), _ => None } } ty::ty_enum(id, _) => { - let vid = match ctor { variant(id) => id, + let vid = match (*ctor) { variant(id) => id, _ => fail ~"check_exhaustive: non-variant ctor" }; match vec::find(*ty::enum_variants(tcx, id), |v| v.id == vid) { @@ -118,7 +118,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: ~[@pat]) { } }; let msg = ~"non-exhaustive patterns" + match ext { - Some(s) => ~": " + s + ~" not covered", + Some(ref s) => ~": " + (*s) + ~" not covered", None => ~"" }; tcx.sess.span_err(sp, msg); @@ -140,9 +140,9 @@ impl ctor : cmp::Eq { match ((*self), (*other)) { (single, single) => true, (variant(did_self), variant(did_other)) => did_self == did_other, - (val(cv_self), val(cv_other)) => cv_self == cv_other, - (range(cv0_self, cv1_self), range(cv0_other, cv1_other)) => { - cv0_self == cv0_other && cv1_self == cv1_other + (val(ref cv_self), val(ref cv_other)) => (*cv_self) == (*cv_other), + (range(ref cv0_self, ref cv1_self), range(ref cv0_other, ref cv1_other)) => { + (*cv0_self) == (*cv0_other) && (*cv1_self) == (*cv1_other) } (single, _) | (variant(_), _) | (val(_), _) | (range(*), _) => { false @@ -186,7 +186,7 @@ fn is_useful(tcx: ty::ctxt, m: matrix, v: ~[@pat]) -> useful { is_useful_specialized(tcx, m, v, val(const_bool(false)), 0u, left_ty) } - u => u + ref u => (*u) } } ty::ty_enum(eid, _) => { @@ -194,7 +194,7 @@ fn is_useful(tcx: ty::ctxt, m: matrix, v: ~[@pat]) -> useful { match is_useful_specialized(tcx, m, v, variant(va.id), va.args.len(), left_ty) { not_useful => (), - u => return u + ref u => return (*u) } } not_useful @@ -205,18 +205,18 @@ fn is_useful(tcx: ty::ctxt, m: matrix, v: ~[@pat]) -> useful { } } } - Some(ctor) => { + Some(ref ctor) => { match is_useful(tcx, vec::filter_map(m, |r| default(tcx, *r) ), vec::tail(v)) { - useful_ => useful(left_ty, ctor), - u => u + useful_ => useful(left_ty, (*ctor)), + ref u => (*u) } } } } - Some(v0_ctor) => { - let arity = ctor_arity(tcx, v0_ctor, left_ty); - is_useful_specialized(tcx, m, v, v0_ctor, arity, left_ty) + Some(ref v0_ctor) => { + let arity = ctor_arity(tcx, (*v0_ctor), left_ty); + is_useful_specialized(tcx, m, v, (*v0_ctor), arity, left_ty) } } } @@ -228,7 +228,7 @@ fn is_useful_specialized(tcx: ty::ctxt, m: matrix, v: ~[@pat], ctor: ctor, tcx, ms, specialize(tcx, v, ctor, arity, lty).get()); match 
could_be_useful { useful_ => useful(lty, ctor), - u => u + ref u => (*u) } } @@ -362,10 +362,10 @@ fn specialize(tcx: ty::ctxt, r: ~[@pat], ctor_id: ctor, arity: uint, let const_expr = lookup_const_by_id(tcx, did).get(); let e_v = eval_const_expr(tcx, const_expr); let match_ = match ctor_id { - val(v) => compare_const_vals(e_v, v) == 0, - range(c_lo, c_hi) => { - compare_const_vals(c_lo, e_v) >= 0 && - compare_const_vals(c_hi, e_v) <= 0 + val(ref v) => compare_const_vals(e_v, (*v)) == 0, + range(ref c_lo, ref c_hi) => { + compare_const_vals((*c_lo), e_v) >= 0 && + compare_const_vals((*c_hi), e_v) <= 0 } single => true, _ => fail ~"type error" @@ -456,10 +456,10 @@ fn specialize(tcx: ty::ctxt, r: ~[@pat], ctor_id: ctor, arity: uint, pat_lit(expr) => { let e_v = eval_const_expr(tcx, expr); let match_ = match ctor_id { - val(v) => compare_const_vals(e_v, v) == 0, - range(c_lo, c_hi) => { - compare_const_vals(c_lo, e_v) >= 0 && - compare_const_vals(c_hi, e_v) <= 0 + val(ref v) => compare_const_vals(e_v, (*v)) == 0, + range(ref c_lo, ref c_hi) => { + compare_const_vals((*c_lo), e_v) >= 0 && + compare_const_vals((*c_hi), e_v) <= 0 } single => true, _ => fail ~"type error" @@ -468,8 +468,8 @@ fn specialize(tcx: ty::ctxt, r: ~[@pat], ctor_id: ctor, arity: uint, } pat_range(lo, hi) => { let (c_lo, c_hi) = match ctor_id { - val(v) => (v, v), - range(lo, hi) => (lo, hi), + val(ref v) => ((*v), (*v)), + range(ref lo, ref hi) => ((*lo), (*hi)), single => return Some(vec::tail(r)), _ => fail ~"type error" }; diff --git a/src/librustc/middle/check_const.rs b/src/librustc/middle/check_const.rs index 2b7516a3a67..4639c1f31f3 100644 --- a/src/librustc/middle/check_const.rs +++ b/src/librustc/middle/check_const.rs @@ -35,8 +35,8 @@ fn check_item(sess: Session, ast_map: ast_map::map, (v.visit_expr)(ex, true, v); check_item_recursion(sess, ast_map, def_map, it); } - item_enum(enum_definition, _) => { - for enum_definition.variants.each |var| { + item_enum(ref enum_definition, _) => { + for (*enum_definition).variants.each |var| { do option::iter(&var.node.disr_expr) |ex| { (v.visit_expr)(*ex, true, v); } diff --git a/src/librustc/middle/check_loop.rs b/src/librustc/middle/check_loop.rs index 433b1f81099..5ff186126fb 100644 --- a/src/librustc/middle/check_loop.rs +++ b/src/librustc/middle/check_loop.rs @@ -22,23 +22,23 @@ fn check_crate(tcx: ty::ctxt, crate: @crate) { }, visit_expr: |e: @expr, cx: ctx, v: visit::vt<ctx>| { match e.node { - expr_while(e, b) => { + expr_while(e, ref b) => { (v.visit_expr)(e, cx, v); - (v.visit_block)(b, {in_loop: true,.. cx}, v); + (v.visit_block)((*b), {in_loop: true,.. cx}, v); } - expr_loop(b, _) => { - (v.visit_block)(b, {in_loop: true,.. cx}, v); + expr_loop(ref b, _) => { + (v.visit_block)((*b), {in_loop: true,.. 
cx}, v); } expr_fn(_, _, _, _) => { visit::visit_expr(e, {in_loop: false, can_ret: true}, v); } - expr_fn_block(_, b, _) => { - (v.visit_block)(b, {in_loop: false, can_ret: false}, v); + expr_fn_block(_, ref b, _) => { + (v.visit_block)((*b), {in_loop: false, can_ret: false}, v); } - expr_loop_body(@{node: expr_fn_block(_, b, _), _}) => { + expr_loop_body(@{node: expr_fn_block(_, ref b, _), _}) => { let proto = ty::ty_fn_proto(ty::expr_ty(tcx, e)); let blk = (proto == ProtoBorrowed); - (v.visit_block)(b, {in_loop: true, can_ret: blk}, v); + (v.visit_block)((*b), {in_loop: true, can_ret: blk}, v); } expr_break(_) => { if !cx.in_loop { diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 389412b2413..5b3eb1e7266 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -107,9 +107,9 @@ fn classify(e: @expr, } } - ast::expr_struct(_, fs, None) | - ast::expr_rec(fs, None) => { - let cs = do vec::map(fs) |f| { + ast::expr_struct(_, ref fs, None) | + ast::expr_rec(ref fs, None) => { + let cs = do vec::map((*fs)) |f| { if f.node.mutbl == ast::m_imm { classify(f.node.expr, def_map, tcx) } else { @@ -222,7 +222,7 @@ impl const_val : cmp::Eq { (const_float(a), const_float(b)) => a == b, (const_int(a), const_int(b)) => a == b, (const_uint(a), const_uint(b)) => a == b, - (const_str(a), const_str(b)) => a == b, + (const_str(ref a), const_str(ref b)) => (*a) == (*b), (const_bool(a), const_bool(b)) => a == b, (const_float(_), _) | (const_int(_), _) | (const_uint(_), _) | (const_str(_), _) | (const_bool(_), _) => false @@ -233,8 +233,8 @@ impl const_val : cmp::Eq { fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { match eval_const_expr_partial(tcx, e) { - Ok(r) => r, - Err(s) => fail s + Ok(ref r) => (*r), + Err(ref s) => fail (*s) } } @@ -250,7 +250,7 @@ fn eval_const_expr_partial(tcx: middle::ty::ctxt, e: @expr) Ok(const_uint(i)) => Ok(const_uint(-i)), Ok(const_str(_)) => Err(~"Negate on string"), Ok(const_bool(_)) => Err(~"Negate on boolean"), - err => err + ref err => (*err) } } expr_unary(not, inner) => { @@ -438,10 +438,10 @@ fn compare_const_vals(a: const_val, b: const_val) -> int { 1 } } - (const_str(a), const_str(b)) => { - if a == b { + (const_str(ref a), const_str(ref b)) => { + if (*a) == (*b) { 0 - } else if a < b { + } else if (*a) < (*b) { -1 } else { 1 diff --git a/src/librustc/middle/kind.rs b/src/librustc/middle/kind.rs index 5c6c99d7a1c..443c7286410 100644 --- a/src/librustc/middle/kind.rs +++ b/src/librustc/middle/kind.rs @@ -308,8 +308,8 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) { check_copy_ex(cx, ls, false, reason); check_copy_ex(cx, rs, false, reason); } - expr_rec(fields, def) | expr_struct(_, fields, def) => { - for fields.each |field| { maybe_copy(cx, field.node.expr, + expr_rec(ref fields, def) | expr_struct(_, ref fields, def) => { + for (*fields).each |field| { maybe_copy(cx, field.node.expr, Some(("record or struct fields require \ copyable arguments", ""))); } match def { @@ -318,13 +318,13 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) { let t = ty::expr_ty(cx.tcx, ex); let ty_fields = match ty::get(t).sty { ty::ty_rec(f) => f, - ty::ty_class(did, substs) => - ty::class_items_as_fields(cx.tcx, did, &substs), + ty::ty_class(did, ref substs) => + ty::class_items_as_fields(cx.tcx, did, &(*substs)), _ => cx.tcx.sess.span_bug(ex.span, ~"bad base expr type in record") }; for ty_fields.each |tf| { - if !vec::any(fields, |f| f.node.ident == tf.ident ) && + if 
!vec::any((*fields), |f| f.node.ident == tf.ident ) && !ty::kind_can_be_copied(ty::type_kind(cx.tcx, tf.mt.ty)) { cx.tcx.sess.span_err(e.span, ~"copying a noncopyable value"); @@ -593,7 +593,7 @@ fn check_cast_for_escaping_regions( // worries. let target_ty = ty::expr_ty(cx.tcx, target); let target_substs = match ty::get(target_ty).sty { - ty::ty_trait(_, substs, _) => {substs} + ty::ty_trait(_, ref substs, _) => {(*substs)} _ => { return; /* not a cast to a trait */ } }; diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 0a00ad88a60..919d24035f8 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -153,10 +153,10 @@ impl LanguageItemCollector { fn match_and_collect_meta_item(item_def_id: def_id, meta_item: meta_item) { match meta_item.node { - meta_name_value(key, literal) => { + meta_name_value(ref key, literal) => { match literal.node { lit_str(value) => { - self.match_and_collect_item(item_def_id, key, *value); + self.match_and_collect_item(item_def_id, (*key), *value); } _ => {} // Skip. } diff --git a/src/librustc/middle/lint.rs b/src/librustc/middle/lint.rs index ef67cb69ede..e290add25e9 100644 --- a/src/librustc/middle/lint.rs +++ b/src/librustc/middle/lint.rs @@ -303,8 +303,8 @@ impl ctxt { ast::meta_list(_, metas) => { for metas.each |meta| { match meta.node { - ast::meta_word(lintname) => { - triples.push((*meta, *level, lintname)); + ast::meta_word(ref lintname) => { + triples.push((*meta, *level, *lintname)); } _ => { self.sess.span_err( @@ -547,9 +547,9 @@ fn check_item_type_limits(cx: ty::ctxt, it: @ast::item) { let visit = item_stopping_visitor(visit::mk_simple_visitor(@{ visit_expr: fn@(e: @ast::expr) { match e.node { - ast::expr_binary(binop, @l, @r) => { - if is_comparison(binop) - && !check_limits(cx, binop, &l, &r) { + ast::expr_binary(ref binop, @ref l, @ref r) => { + if is_comparison(*binop) + && !check_limits(cx, *binop, l, r) { cx.sess.span_lint( type_limits, e.id, it.id, e.span, ~"comparison is useless due to type limits"); @@ -756,7 +756,7 @@ fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) { ast::item_trait(*) => { check_case(cx, it.ident, it.id, it.id, it.span) } - ast::item_enum(enum_definition, _) => { + ast::item_enum(ref enum_definition, _) => { check_case(cx, it.ident, it.id, it.id, it.span); for enum_definition.variants.each |variant| { check_case(cx, variant.node.name, @@ -782,6 +782,7 @@ fn check_pat(tcx: ty::ctxt, pat: @ast::pat) { span, fmt!("binding `%s` should use ref or copy mode", tcx.sess.str_of(path_to_ident(path)))); + tcx.bad_bindings.insert(id, ()); } } } @@ -806,7 +807,7 @@ fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl, fn check_fn_deprecated_modes(tcx: ty::ctxt, fn_ty: ty::t, decl: ast::fn_decl, span: span, id: ast::node_id) { match ty::get(fn_ty).sty { - ty::ty_fn(fn_ty) => { + ty::ty_fn(ref fn_ty) => { let mut counter = 0; for vec::each2(fn_ty.sig.inputs, decl.inputs) |arg_ty, arg_ast| { counter += 1; diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 5ed06c2aab3..c24c267bc36 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -1058,7 +1058,7 @@ impl Liveness { self.propagate_through_expr(e, succ) } - expr_fn(_, _, blk, _) | expr_fn_block(_, blk, _) => { + expr_fn(_, _, ref blk, _) | expr_fn_block(_, ref blk, _) => { debug!("%s is an expr_fn or expr_fn_block", expr_to_str(expr, self.tcx.sess.intr())); @@ -1066,7 +1066,7 @@ impl Liveness { The next-node for a 
break is the successor of the entire loop. The next-node for a continue is the top of this loop. */ - self.with_loop_nodes(blk.node.id, succ, + self.with_loop_nodes((*blk).node.id, succ, self.live_node(expr.id, expr.span), || { // the construction of a closure itself is not important, @@ -1081,7 +1081,7 @@ impl Liveness { }) } - expr_if(cond, then, els) => { + expr_if(cond, ref then, els) => { // // (cond) // | @@ -1096,24 +1096,24 @@ impl Liveness { // ( succ ) // let else_ln = self.propagate_through_opt_expr(els, succ); - let then_ln = self.propagate_through_block(then, succ); + let then_ln = self.propagate_through_block((*then), succ); let ln = self.live_node(expr.id, expr.span); self.init_from_succ(ln, else_ln); self.merge_from_succ(ln, then_ln, false); self.propagate_through_expr(cond, ln) } - expr_while(cond, blk) => { - self.propagate_through_loop(expr, Some(cond), blk, succ) + expr_while(cond, ref blk) => { + self.propagate_through_loop(expr, Some(cond), (*blk), succ) } // Note that labels have been resolved, so we don't need to look // at the label ident - expr_loop(blk, _) => { - self.propagate_through_loop(expr, None, blk, succ) + expr_loop(ref blk, _) => { + self.propagate_through_loop(expr, None, (*blk), succ) } - expr_match(e, arms) => { + expr_match(e, ref arms) => { // // (e) // | @@ -1131,7 +1131,7 @@ impl Liveness { let ln = self.live_node(expr.id, expr.span); self.init_empty(ln, succ); let mut first_merge = true; - for arms.each |arm| { + for (*arms).each |arm| { let body_succ = self.propagate_through_block(arm.body, succ); let guard_succ = @@ -1223,16 +1223,16 @@ impl Liveness { self.propagate_through_expr(element, succ) } - expr_rec(fields, with_expr) => { + expr_rec(ref fields, with_expr) => { let succ = self.propagate_through_opt_expr(with_expr, succ); - do fields.foldr(succ) |field, succ| { + do (*fields).foldr(succ) |field, succ| { self.propagate_through_expr(field.node.expr, succ) } } - expr_struct(_, fields, with_expr) => { + expr_struct(_, ref fields, with_expr) => { let succ = self.propagate_through_opt_expr(with_expr, succ); - do fields.foldr(succ) |field, succ| { + do (*fields).foldr(succ) |field, succ| { self.propagate_through_expr(field.node.expr, succ) } } @@ -1294,8 +1294,8 @@ impl Liveness { succ } - expr_block(blk) => { - self.propagate_through_block(blk, succ) + expr_block(ref blk) => { + self.propagate_through_block((*blk), succ) } expr_mac(*) => { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index c38778d5dff..f202a81f049 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -315,7 +315,7 @@ fn opt_deref_kind(t: ty::t) -> Option<deref_kind> { Some(deref_ptr(uniq_ptr)) } - ty::ty_fn(f) if f.meta.proto == ast::ProtoUniq => { + ty::ty_fn(ref f) if (*f).meta.proto == ast::ProtoUniq => { Some(deref_ptr(uniq_ptr)) } @@ -325,8 +325,8 @@ fn opt_deref_kind(t: ty::t) -> Option<deref_kind> { Some(deref_ptr(region_ptr(r))) } - ty::ty_fn(f) if f.meta.proto == ast::ProtoBorrowed => { - Some(deref_ptr(region_ptr(f.meta.region))) + ty::ty_fn(ref f) if (*f).meta.proto == ast::ProtoBorrowed => { + Some(deref_ptr(region_ptr((*f).meta.region))) } ty::ty_box(*) | @@ -335,7 +335,7 @@ fn opt_deref_kind(t: ty::t) -> Option<deref_kind> { Some(deref_ptr(gc_ptr)) } - ty::ty_fn(f) if f.meta.proto == ast::ProtoBox => { + ty::ty_fn(ref f) if (*f).meta.proto == ast::ProtoBox => { Some(deref_ptr(gc_ptr)) } diff --git a/src/librustc/middle/privacy.rs 
b/src/librustc/middle/privacy.rs index 6ba46a71d4e..e2f95cd270f 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -131,14 +131,14 @@ fn check_crate(tcx: ty::ctxt, method_map: &method_map, crate: @ast::crate) { match tcx.items.find(trait_id.node) { Some(node_item(item, _)) => { match item.node { - item_trait(_, _, methods) => { - if method_num >= methods.len() { + item_trait(_, _, ref methods) => { + if method_num >= (*methods).len() { tcx.sess.span_bug(span, ~"method \ number \ out of \ range?!"); } - match methods[method_num] { + match (*methods)[method_num] { provided(method) if method.vis == private && !privileged_items @@ -212,10 +212,10 @@ fn check_crate(tcx: ty::ctxt, method_map: &method_map, crate: @ast::crate) { field access"); check_field(expr.span, id, ident); } - Some(entry) => { + Some(ref entry) => { debug!("(privacy checking) checking \ impl method"); - check_method(expr.span, &entry.origin); + check_method(expr.span, &(*entry).origin); } } } @@ -233,22 +233,22 @@ fn check_crate(tcx: ty::ctxt, method_map: &method_map, crate: @ast::crate) { ~"method call not in \ method map"); } - Some(entry) => { + Some(ref entry) => { debug!("(privacy checking) checking \ impl method"); - check_method(expr.span, &entry.origin); + check_method(expr.span, &(*entry).origin); } } } _ => {} } } - expr_struct(_, fields, _) => { + expr_struct(_, ref fields, _) => { match ty::get(ty::expr_ty(tcx, expr)).sty { ty_class(id, _) => { if id.crate != local_crate || !privileged_items.contains(&(id.node)) { - for fields.each |field| { + for (*fields).each |field| { debug!("(privacy checking) checking \ field in struct literal"); check_field(expr.span, id, @@ -261,7 +261,7 @@ fn check_crate(tcx: ty::ctxt, method_map: &method_map, crate: @ast::crate) { !privileged_items.contains(&(id.node)) { match tcx.def_map.get(expr.id) { def_variant(_, variant_id) => { - for fields.each |field| { + for (*fields).each |field| { debug!("(privacy checking) \ checking field in \ struct variant \ diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 658c19b6ac8..a1e5a98915a 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -697,8 +697,8 @@ fn determine_rp_in_ty(ty: @ast::Ty, visit_mt(mt, cx, visitor); } - ast::ty_rec(fields) => { - for fields.each |field| { + ast::ty_rec(ref fields) => { + for (*fields).each |field| { visit_mt(field.node.mt, cx, visitor); } } diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index 21bd01b6f91..aeefbef64ee 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -635,7 +635,7 @@ impl NameBindings { /// Returns the module node if applicable. fn get_module_if_available() -> Option<@Module> { match self.type_def { - Some(type_def) => type_def.module_def, + Some(ref type_def) => (*type_def).module_def, None => None } } @@ -666,14 +666,14 @@ impl NameBindings { TypeNS => { match self.type_def { None => None, - Some(type_def) => { + Some(ref type_def) => { // FIXME (#3784): This is reallllly questionable. // Perhaps the right thing to do is to merge def_mod // and def_ty. 
- match type_def.type_def { + match (*type_def).type_def { Some(type_def) => Some(type_def), None => { - match type_def.module_def { + match (*type_def).module_def { Some(module_def) => { module_def.def_id.map(|def_id| def_mod(*def_id)) @@ -699,7 +699,7 @@ impl NameBindings { TypeNS => { match self.type_def { None => None, - Some(type_def) => Some(type_def.privacy) + Some(ref type_def) => Some((*type_def).privacy) } } ValueNS => { @@ -1166,14 +1166,14 @@ impl Resolver { (privacy, def_ty(local_def(item.id)), sp); } - item_enum(enum_definition, _) => { + item_enum(ref enum_definition, _) => { let (name_bindings, new_parent) = self.add_child(ident, parent, ForbidDuplicateTypes, sp); (*name_bindings).define_type (privacy, def_ty(local_def(item.id)), sp); - for enum_definition.variants.each |variant| { + for (*enum_definition).variants.each |variant| { self.build_reduced_graph_for_variant(*variant, local_def(item.id), // inherited => privacy of the enum item @@ -1277,7 +1277,7 @@ impl Resolver { visit_item(item, parent, visitor); } - item_trait(_, _, methods) => { + item_trait(_, _, ref methods) => { let (name_bindings, new_parent) = self.add_child(ident, parent, ForbidDuplicateTypes, sp); @@ -1287,7 +1287,7 @@ impl Resolver { // We only need to create the module if the trait has static // methods, so check that first. let mut has_static_methods = false; - for methods.each |method| { + for (*methods).each |method| { let ty_m = trait_method_to_ty_method(*method); match ty_m.self_ty.node { sty_static => { @@ -1315,7 +1315,7 @@ impl Resolver { // Add the names of all the methods to the trait info. let method_names = @HashMap(); - for methods.each |method| { + for (*methods).each |method| { let ty_m = trait_method_to_ty_method(*method); let ident = ty_m.ident; @@ -1403,11 +1403,11 @@ impl Resolver { variant.span); self.structs.insert(local_def(variant.node.id), ()); } - enum_variant_kind(enum_definition) => { + enum_variant_kind(ref enum_definition) => { (*child).define_type(privacy, def_ty(local_def(variant.node.id)), variant.span); - for enum_definition.variants.each |variant| { + for (*enum_definition).variants.each |variant| { self.build_reduced_graph_for_variant(*variant, item_id, parent_privacy, parent, visitor); @@ -1475,8 +1475,8 @@ impl Resolver { subclass, view_path.span); } - view_path_list(_, source_idents, _) => { - for source_idents.each |source_ident| { + view_path_list(_, ref source_idents, _) => { + for (*source_idents).each |source_ident| { let name = source_ident.node.name; let subclass = @SingleImport(name, name, @@ -1527,9 +1527,9 @@ impl Resolver { unsupported"); } - view_path_list(path, path_list_idents, _) => { + view_path_list(path, ref path_list_idents, _) => { if path.idents.len() == 1u && - path_list_idents.len() == 0 { + (*path_list_idents).len() == 0 { self.session.span_warn(view_item.span, ~"this syntax for \ @@ -1546,7 +1546,7 @@ impl Resolver { in this module"); } - for path_list_idents.each |path_list_ident| { + for (*path_list_idents).each |path_list_ident| { let ident = path_list_ident.node.name; let id = path_list_ident.node.id; module_.exported_names.insert(ident, id); @@ -2838,8 +2838,8 @@ impl Resolver { match self.resolve_item_in_lexical_scope(module_, name, TypeNS) { Success(target) => { match target.bindings.type_def { - Some(type_def) => { - match type_def.module_def { + Some(ref type_def) => { + match (*type_def).module_def { None => { error!("!!! 
(resolving module in lexical \ scope) module wasn't actually a \ @@ -3541,9 +3541,9 @@ impl Resolver { // enum item: resolve all the variants' discrs, // then resolve the ty params - item_enum(enum_def, type_parameters) => { + item_enum(ref enum_def, type_parameters) => { - for enum_def.variants.each() |variant| { + for (*enum_def).variants.each() |variant| { do variant.node.disr_expr.iter() |dis_expr| { // resolve the discriminator expr // as a constant @@ -3588,7 +3588,7 @@ impl Resolver { visitor); } - item_trait(type_parameters, traits, methods) => { + item_trait(type_parameters, traits, ref methods) => { // Create a new rib for the self type. let self_type_rib = @Rib(NormalRibKind); (*self.type_ribs).push(self_type_rib); @@ -3623,30 +3623,30 @@ impl Resolver { } } - for methods.each |method| { + for (*methods).each |method| { // Create a new rib for the method-specific type // parameters. // // XXX: Do we need a node ID here? match *method { - required(ty_m) => { + required(ref ty_m) => { do self.with_type_parameter_rib - (HasTypeParameters(&ty_m.tps, + (HasTypeParameters(&(*ty_m).tps, item.id, type_parameters.len(), MethodRibKind(item.id, Required))) { // Resolve the method-specific type // parameters. - self.resolve_type_parameters(ty_m.tps, + self.resolve_type_parameters((*ty_m).tps, visitor); - for ty_m.decl.inputs.each |argument| { + for (*ty_m).decl.inputs.each |argument| { self.resolve_type(argument.ty, visitor); } - self.resolve_type(ty_m.decl.output, visitor); + self.resolve_type((*ty_m).decl.output, visitor); } } provided(m) => { @@ -3705,7 +3705,7 @@ impl Resolver { } } - item_fn(fn_decl, _, ty_params, block) => { + item_fn(fn_decl, _, ty_params, ref block) => { // If this is the main function, we must record it in the // session. // @@ -3726,7 +3726,7 @@ impl Resolver { item.id, 0, OpaqueFunctionRibKind), - block, + (*block), NoSelfBinding, NoCaptureClause, visitor); @@ -3966,13 +3966,13 @@ impl Resolver { None => { // Nothing to do. 
} - Some(destructor) => { + Some(ref destructor) => { self.resolve_function(NormalRibKind, None, NoTypeParameters, - destructor.node.body, + (*destructor).node.body, HasSelfBinding - (destructor.node.self_id), + ((*destructor).node.self_id), NoCaptureClause, visitor); } @@ -4892,12 +4892,12 @@ impl Resolver { visit_expr(expr, (), visitor); } - expr_fn(_, fn_decl, block, capture_clause) | - expr_fn_block(fn_decl, block, capture_clause) => { - self.resolve_function(FunctionRibKind(expr.id, block.node.id), + expr_fn(_, fn_decl, ref block, capture_clause) | + expr_fn_block(fn_decl, ref block, capture_clause) => { + self.resolve_function(FunctionRibKind(expr.id, (*block).node.id), Some(@fn_decl), NoTypeParameters, - block, + (*block), NoSelfBinding, HasCaptureClause(capture_clause), visitor); diff --git a/src/librustc/middle/trans/alt.rs b/src/librustc/middle/trans/alt.rs index bc26338a624..90ac9a9ce3c 100644 --- a/src/librustc/middle/trans/alt.rs +++ b/src/librustc/middle/trans/alt.rs @@ -765,7 +765,7 @@ fn extract_variant_args(bcx: block, pat_id: ast::node_id, let _icx = bcx.insn_ctxt("alt::extract_variant_args"); let ccx = bcx.fcx.ccx; let enum_ty_substs = match ty::get(node_id_type(bcx, pat_id)).sty { - ty::ty_enum(id, substs) => { assert id == vdefs.enm; substs.tps } + ty::ty_enum(id, ref substs) => { assert id == vdefs.enm; (*substs).tps } _ => bcx.sess().bug(~"extract_variant_args: pattern has non-enum type") }; let mut blobptr = val; diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index 7114fb9fb23..e5f1ae5d0b8 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -576,14 +576,14 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, cx = f(cx, llfld_a, *arg); } } - ty::ty_enum(tid, substs) => { + ty::ty_enum(tid, ref substs) => { let variants = ty::enum_variants(cx.tcx(), tid); let n_variants = (*variants).len(); // Cast the enums to types we can GEP into. 
if n_variants == 1u { return iter_variant(cx, av, variants[0], - substs.tps, tid, f); + (*substs).tps, tid, f); } let ccx = cx.ccx(); @@ -608,7 +608,7 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, AddCase(llswitch, C_int(ccx, variant.disr_val), variant_cx.llbb); let variant_cx = iter_variant(variant_cx, llunion_a_ptr, *variant, - substs.tps, tid, f); + (*substs).tps, tid, f); Br(variant_cx, next_cx.llbb); } return next_cx; @@ -754,8 +754,8 @@ fn need_invoke(bcx: block) -> bool { let mut cur = bcx; loop { match cur.kind { - block_scope(inf) => { - for vec::each(inf.cleanups) |cleanup| { + block_scope(ref inf) => { + for vec::each((*inf).cleanups) |cleanup| { match *cleanup { clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => { if cleanup_type == normal_exit_and_unwind { @@ -789,9 +789,9 @@ fn in_lpad_scope_cx(bcx: block, f: fn(scope_info)) { let mut bcx = bcx; loop { match bcx.kind { - block_scope(inf) => { - if inf.cleanups.len() > 0u || bcx.parent.is_none() { - f(inf); return; + block_scope(ref inf) => { + if (*inf).cleanups.len() > 0u || bcx.parent.is_none() { + f((*inf)); return; } } _ => () @@ -1159,15 +1159,15 @@ fn cleanup_and_leave(bcx: block, upto: Option<BasicBlockRef>, } match cur.kind { - block_scope(inf) if inf.cleanups.len() > 0u => { - for vec::find(inf.cleanup_paths, + block_scope(ref inf) if (*inf).cleanups.len() > 0u => { + for vec::find((*inf).cleanup_paths, |cp| cp.target == leave).each |cp| { Br(bcx, cp.dest); return; } let sub_cx = sub_block(bcx, ~"cleanup"); Br(bcx, sub_cx.llbb); - inf.cleanup_paths.push({target: leave, dest: sub_cx.llbb}); + (*inf).cleanup_paths.push({target: leave, dest: sub_cx.llbb}); bcx = trans_block_cleanups_(sub_cx, block_cleanups(cur), is_lpad); } _ => () @@ -1831,8 +1831,8 @@ fn trans_enum_def(ccx: @crate_ctxt, enum_definition: ast::enum_def, trans_struct_def(ccx, struct_def, tps, path, variant.node.name, variant.node.id); } - ast::enum_variant_kind(enum_definition) => { - trans_enum_def(ccx, enum_definition, id, tps, degen, path, vi, + ast::enum_variant_kind(ref enum_definition) => { + trans_enum_def(ccx, (*enum_definition), id, tps, degen, path, vi, i); } } @@ -1847,21 +1847,21 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { _ => fail ~"trans_item", }; match item.node { - ast::item_fn(decl, purity, tps, body) => { + ast::item_fn(decl, purity, tps, ref body) => { if purity == ast::extern_fn { let llfndecl = get_item_val(ccx, item.id); foreign::trans_foreign_fn(ccx, vec::append( *path, ~[path_name(item.ident)]), - decl, body, llfndecl, item.id); + decl, (*body), llfndecl, item.id); } else if tps.is_empty() { let llfndecl = get_item_val(ccx, item.id); trans_fn(ccx, vec::append(*path, ~[path_name(item.ident)]), - decl, body, llfndecl, no_self, None, item.id, None); + decl, (*body), llfndecl, no_self, None, item.id, None); } else { - for vec::each(body.node.stmts) |stmt| { + for vec::each((*body).node.stmts) |stmt| { match stmt.node { ast::stmt_decl(@{node: ast::decl_item(i), _}, _) => { trans_item(ccx, *i); @@ -1882,12 +1882,12 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { ast::item_mod(m) => { trans_mod(ccx, m); } - ast::item_enum(enum_definition, tps) => { + ast::item_enum(ref enum_definition, tps) => { if tps.len() == 0u { - let degen = enum_definition.variants.len() == 1u; + let degen = (*enum_definition).variants.len() == 1u; let vi = ty::enum_variants(ccx.tcx, local_def(item.id)); let mut i = 0; - trans_enum_def(ccx, enum_definition, item.id, tps, degen, path, + trans_enum_def(ccx, (*enum_definition), 
item.id, tps, degen, path, vi, &mut i); } } @@ -1895,7 +1895,7 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { ast::item_foreign_mod(foreign_mod) => { let abi = match attr::foreign_abi(item.attrs) { either::Right(abi_) => abi_, - either::Left(msg) => ccx.sess.span_fatal(item.span, msg) + either::Left(ref msg) => ccx.sess.span_fatal(item.span, (*msg)) }; foreign::trans_foreign_mod(ccx, foreign_mod, abi); } @@ -2073,7 +2073,7 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id, substs: Option<param_substs>) -> ~str { let t = ty::node_id_to_type(ccx.tcx, id); match ccx.item_symbols.find(id) { - Some(s) => s, + Some(ref s) => (*s), None if substs.is_none() => { let s = mangle_exported_name( ccx, @@ -2205,17 +2205,17 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef { llfn } - ast_map::node_variant(v, enm, pth) => { + ast_map::node_variant(ref v, enm, pth) => { let llfn; - match v.node.kind { + match (*v).node.kind { ast::tuple_variant_kind(args) => { assert args.len() != 0u; let pth = vec::append(*pth, ~[path_name(enm.ident), - path_name(v.node.name)]); + path_name((*v).node.name)]); llfn = match enm.node { ast::item_enum(_, _) => { - register_fn(ccx, v.span, pth, id) + register_fn(ccx, (*v).span, pth, id) } _ => fail ~"node_variant, shouldn't happen" }; @@ -2302,12 +2302,12 @@ fn register_deriving_method(ccx: @crate_ctxt, fn trans_constant(ccx: @crate_ctxt, it: @ast::item) { let _icx = ccx.insn_ctxt("trans_constant"); match it.node { - ast::item_enum(enum_definition, _) => { + ast::item_enum(ref enum_definition, _) => { let vi = ty::enum_variants(ccx.tcx, {crate: ast::local_crate, node: it.id}); let mut i = 0; let path = item_path(ccx, it); - for vec::each(enum_definition.variants) |variant| { + for vec::each((*enum_definition).variants) |variant| { let p = vec::append(path, ~[path_name(variant.node.name), path_name(special_idents::descrim)]); let s = mangle_exported_name(ccx, p, ty::mk_int(ccx.tcx)); diff --git a/src/librustc/middle/trans/callee.rs b/src/librustc/middle/trans/callee.rs index be6529f2a5f..7c98f2e6a64 100644 --- a/src/librustc/middle/trans/callee.rs +++ b/src/librustc/middle/trans/callee.rs @@ -62,9 +62,9 @@ fn trans(bcx: block, expr: @ast::expr) -> Callee { } ast::expr_field(base, _, _) => { match bcx.ccx().maps.method_map.find(expr.id) { - Some(origin) => { // An impl method + Some(ref origin) => { // An impl method return meth::trans_method_callee(bcx, expr.id, - base, origin); + base, (*origin)); } None => {} // not a method, just a field } @@ -316,11 +316,11 @@ fn trans_method_call(in_cx: block, expr_ty(in_cx, call_ex), |cx| { match cx.ccx().maps.method_map.find(call_ex.id) { - Some(origin) => { + Some(ref origin) => { meth::trans_method_callee(cx, call_ex.callee_id, rcvr, - origin) + (*origin)) } None => { cx.tcx().sess.span_bug(call_ex.span, @@ -427,9 +427,9 @@ fn trans_call_inner( ArgExprs(args) => { args.len() > 0u && match vec::last(args).node { ast::expr_loop_body(@{ - node: ast::expr_fn_block(_, body, _), + node: ast::expr_fn_block(_, ref body, _), _ - }) => body_contains_ret(body), + }) => body_contains_ret((*body)), _ => false } } @@ -622,14 +622,14 @@ fn trans_arg_expr(bcx: block, Some(_) => { match arg_expr.node { ast::expr_loop_body( - blk @ @{node:ast::expr_fn_block(decl, body, cap), _}) => + blk @ @{node:ast::expr_fn_block(decl, ref body, cap), _}) => { let scratch_ty = expr_ty(bcx, blk); let scratch = alloc_ty(bcx, scratch_ty); let arg_ty = expr_ty(bcx, arg_expr); let proto = ty::ty_fn_proto(arg_ty); let bcx = 
closure::trans_expr_fn( - bcx, proto, decl, body, blk.id, + bcx, proto, decl, (*body), blk.id, cap, Some(ret_flag), expr::SaveIn(scratch)); DatumBlock {bcx: bcx, datum: Datum {val: scratch, diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index 25a0f5aa80c..25cd169c403 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -444,7 +444,7 @@ fn revoke_clean(cx: block, val: ValueRef) { fn block_cleanups(bcx: block) -> ~[cleanup] { match bcx.kind { block_non_scope => ~[], - block_scope(inf) => inf.cleanups + block_scope(ref inf) => (*inf).cleanups } } @@ -601,10 +601,10 @@ fn in_scope_cx(cx: block, f: fn(scope_info)) { let mut cur = cx; loop { match cur.kind { - block_scope(inf) => { + block_scope(ref inf) => { debug!("in_scope_cx: selected cur=%s (cx=%s)", cur.to_str(), cx.to_str()); - f(inf); + f((*inf)); return; } _ => () diff --git a/src/librustc/middle/trans/consts.rs b/src/librustc/middle/trans/consts.rs index 36c85439ff5..b85e45d0c52 100644 --- a/src/librustc/middle/trans/consts.rs +++ b/src/librustc/middle/trans/consts.rs @@ -323,9 +323,9 @@ fn const_expr(cx: @crate_ctxt, e: @ast::expr) -> ValueRef { ast::expr_tup(es) => { C_struct(es.map(|e| const_expr(cx, *e))) } - ast::expr_rec(fs, None) => { + ast::expr_rec(ref fs, None) => { C_struct([C_struct( - fs.map(|f| const_expr(cx, f.node.expr)))]) + (*fs).map(|f| const_expr(cx, f.node.expr)))]) } ast::expr_struct(_, ref fs, _) => { let ety = ty::expr_ty(cx.tcx, e); @@ -334,7 +334,7 @@ fn const_expr(cx: @crate_ctxt, e: @ast::expr) -> ValueRef { None) |_hd, field_tys| { field_tys.map(|field_ty| { match fs.find(|f| field_ty.ident == f.node.ident) { - Some(f) => const_expr(cx, f.node.expr), + Some(ref f) => const_expr(cx, (*f).node.expr), None => { cx.tcx.sess.span_bug( e.span, ~"missing struct field"); diff --git a/src/librustc/middle/trans/controlflow.rs b/src/librustc/middle/trans/controlflow.rs index 346fb6a34ff..e7d3c8a207c 100644 --- a/src/librustc/middle/trans/controlflow.rs +++ b/src/librustc/middle/trans/controlflow.rs @@ -75,8 +75,8 @@ fn trans_if(bcx: block, let elseif_blk = ast_util::block_from_expr(elexpr); trans_block(else_bcx_in, elseif_blk, dest) } - ast::expr_block(blk) => { - trans_block(else_bcx_in, blk, dest) + ast::expr_block(ref blk) => { + trans_block(else_bcx_in, (*blk), dest) } // would be nice to have a constraint on ifs _ => bcx.tcx().sess.bug(~"strange alternative in if") diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index 5ca72f58a6f..4e9e1556124 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -452,11 +452,11 @@ fn trans_rvalue_stmt_unadjusted(bcx: block, expr: @ast::expr) -> block { ast::expr_assert(a) => { return controlflow::trans_check_expr(bcx, expr, a, ~"Assertion"); } - ast::expr_while(cond, body) => { - return controlflow::trans_while(bcx, cond, body); + ast::expr_while(cond, ref body) => { + return controlflow::trans_while(bcx, cond, (*body)); } - ast::expr_loop(body, opt_label) => { - return controlflow::trans_loop(bcx, body, opt_label); + ast::expr_loop(ref body, opt_label) => { + return controlflow::trans_loop(bcx, (*body), opt_label); } ast::expr_assign(dst, src) => { let src_datum = unpack_datum!(bcx, trans_to_datum(bcx, src)); @@ -504,20 +504,20 @@ fn trans_rvalue_dps_unadjusted(bcx: block, expr: @ast::expr, return trans_def_dps_unadjusted(bcx, expr, bcx.def(expr.id), dest); } - ast::expr_if(cond, thn, els) => { - return controlflow::trans_if(bcx, 
cond, thn, els, dest); + ast::expr_if(cond, ref thn, els) => { + return controlflow::trans_if(bcx, cond, (*thn), els, dest); } - ast::expr_match(discr, arms) => { - return alt::trans_alt(bcx, expr, discr, arms, dest); + ast::expr_match(discr, ref arms) => { + return alt::trans_alt(bcx, expr, discr, (*arms), dest); } - ast::expr_block(blk) => { - return do base::with_scope(bcx, blk.info(), + ast::expr_block(ref blk) => { + return do base::with_scope(bcx, (*blk).info(), ~"block-expr body") |bcx| { - controlflow::trans_block(bcx, blk, dest) + controlflow::trans_block(bcx, (*blk), dest) }; } - ast::expr_rec(fields, base) | ast::expr_struct(_, fields, base) => { - return trans_rec_or_struct(bcx, fields, base, expr.id, dest); + ast::expr_rec(ref fields, base) | ast::expr_struct(_, ref fields, base) => { + return trans_rec_or_struct(bcx, (*fields), base, expr.id, dest); } ast::expr_tup(args) => { return trans_tup(bcx, args, dest); @@ -534,14 +534,14 @@ fn trans_rvalue_dps_unadjusted(bcx: block, expr: @ast::expr, ast::expr_vec(*) | ast::expr_repeat(*) => { return tvec::trans_fixed_vstore(bcx, expr, expr, dest); } - ast::expr_fn(proto, decl, body, cap_clause) => { + ast::expr_fn(proto, decl, ref body, cap_clause) => { // Don't use this function for anything real. Use the one in // astconv instead. return closure::trans_expr_fn(bcx, proto, - decl, body, expr.id, cap_clause, + decl, (*body), expr.id, cap_clause, None, dest); } - ast::expr_fn_block(decl, body, cap_clause) => { + ast::expr_fn_block(decl, ref body, cap_clause) => { let expr_ty = expr_ty(bcx, expr); match ty::get(expr_ty).sty { ty::ty_fn(ref fn_ty) => { @@ -549,7 +549,7 @@ fn trans_rvalue_dps_unadjusted(bcx: block, expr: @ast::expr, expr_to_str(expr, tcx.sess.intr()), ty_to_str(tcx, expr_ty)); return closure::trans_expr_fn( - bcx, fn_ty.meta.proto, decl, body, + bcx, fn_ty.meta.proto, decl, (*body), expr.id, cap_clause, None, dest); } @@ -563,9 +563,9 @@ fn trans_rvalue_dps_unadjusted(bcx: block, expr: @ast::expr, match ty::get(expr_ty(bcx, expr)).sty { ty::ty_fn(ref fn_ty) => { match blk.node { - ast::expr_fn_block(decl, body, cap) => { + ast::expr_fn_block(decl, ref body, cap) => { return closure::trans_expr_fn( - bcx, fn_ty.meta.proto, decl, body, blk.id, + bcx, fn_ty.meta.proto, decl, (*body), blk.id, cap, Some(None), dest); } _ => { diff --git a/src/librustc/middle/trans/foreign.rs b/src/librustc/middle/trans/foreign.rs index 3180b595f9c..ba9b065901e 100644 --- a/src/librustc/middle/trans/foreign.rs +++ b/src/librustc/middle/trans/foreign.rs @@ -435,7 +435,7 @@ fn decl_x86_64_fn(tys: x86_64_tys, fn link_name(ccx: @crate_ctxt, i: @ast::foreign_item) -> ~str { match attr::first_attr_value_str_by_name(i.attrs, ~"link_name") { None => ccx.sess.str_of(i.ident), - option::Some(ln) => ln + option::Some(ref ln) => (*ln) } } @@ -608,10 +608,10 @@ fn trans_foreign_mod(ccx: @crate_ctxt, let n = vec::len(tys.arg_tys); match tys.x86_64_tys { - Some(x86_64) => { - let mut atys = x86_64.arg_tys; - let mut attrs = x86_64.attrs; - if x86_64.sret { + Some(ref x86_64) => { + let mut atys = (*x86_64).arg_tys; + let mut attrs = (*x86_64).attrs; + if (*x86_64).sret { let llretptr = GEPi(bcx, llargbundle, [0u, n]); let llretloc = Load(bcx, llretptr); llargvals = ~[llretloc]; @@ -649,8 +649,8 @@ fn trans_foreign_mod(ccx: @crate_ctxt, llargbundle: ValueRef, llretval: ValueRef) { let _icx = bcx.insn_ctxt("foreign::shim::build_ret"); match tys.x86_64_tys { - Some(x86_64) => { - for vec::eachi(x86_64.attrs) |i, a| { + Some(ref x86_64) => { + for 
vec::eachi((*x86_64).attrs) |i, a| { match *a { Some(attr) => { llvm::LLVMAddInstrAttribute( @@ -660,15 +660,15 @@ fn trans_foreign_mod(ccx: @crate_ctxt, _ => () } } - if x86_64.sret || !tys.ret_def { + if (*x86_64).sret || !tys.ret_def { return; } let n = vec::len(tys.arg_tys); let llretptr = GEPi(bcx, llargbundle, [0u, n]); let llretloc = Load(bcx, llretptr); - if x86_64.ret_ty.cast { + if (*x86_64).ret_ty.cast { let tmp_ptr = BitCast(bcx, llretloc, - T_ptr(x86_64.ret_ty.ty)); + T_ptr((*x86_64).ret_ty.ty)); Store(bcx, llretval, tmp_ptr); } else { Store(bcx, llretval, llretloc); @@ -700,8 +700,8 @@ fn trans_foreign_mod(ccx: @crate_ctxt, cc: lib::llvm::CallConv) -> ValueRef { // Declare the "prototype" for the base function F: match tys.x86_64_tys { - Some(x86_64) => { - do decl_x86_64_fn(x86_64) |fnty| { + Some(ref x86_64) => { + do decl_x86_64_fn((*x86_64)) |fnty| { decl_fn(ccx.llmod, lname, cc, fnty) } } @@ -1110,17 +1110,17 @@ fn trans_foreign_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, llwrapfn: ValueRef, llargbundle: ValueRef) { let _icx = bcx.insn_ctxt("foreign::foreign::wrap::build_args"); match tys.x86_64_tys { - option::Some(x86_64) => { - let mut atys = x86_64.arg_tys; - let mut attrs = x86_64.attrs; + option::Some(ref x86_64) => { + let mut atys = (*x86_64).arg_tys; + let mut attrs = (*x86_64).attrs; let mut j = 0u; - let llretptr = if x86_64.sret { + let llretptr = if (*x86_64).sret { atys = vec::tail(atys); attrs = vec::tail(attrs); j = 1u; get_param(llwrapfn, 0u) - } else if x86_64.ret_ty.cast { - let retptr = alloca(bcx, x86_64.ret_ty.ty); + } else if (*x86_64).ret_ty.cast { + let retptr = alloca(bcx, (*x86_64).ret_ty.ty); BitCast(bcx, retptr, T_ptr(tys.ret_ty)) } else { alloca(bcx, tys.ret_ty) @@ -1164,16 +1164,16 @@ fn trans_foreign_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, llargbundle: ValueRef) { let _icx = bcx.insn_ctxt("foreign::foreign::wrap::build_ret"); match tys.x86_64_tys { - option::Some(x86_64) => { - if x86_64.sret || !tys.ret_def { + option::Some(ref x86_64) => { + if (*x86_64).sret || !tys.ret_def { RetVoid(bcx); return; } let n = vec::len(tys.arg_tys); let llretval = load_inbounds(bcx, llargbundle, ~[0u, n]); - let llretval = if x86_64.ret_ty.cast { + let llretval = if (*x86_64).ret_ty.cast { let retptr = BitCast(bcx, llretval, - T_ptr(x86_64.ret_ty.ty)); + T_ptr((*x86_64).ret_ty.ty)); Load(bcx, retptr) } else { Load(bcx, llretval) @@ -1233,7 +1233,7 @@ fn abi_of_foreign_fn(ccx: @crate_ctxt, i: @ast::foreign_item) }, Some(_) => match attr::foreign_abi(i.attrs) { either::Right(abi) => abi, - either::Left(msg) => ccx.sess.span_fatal(i.span, msg) + either::Left(ref msg) => ccx.sess.span_fatal(i.span, (*msg)) } } } diff --git a/src/librustc/middle/trans/inline.rs b/src/librustc/middle/trans/inline.rs index 1cbae2eac7e..1bdcefb08d7 100644 --- a/src/librustc/middle/trans/inline.rs +++ b/src/librustc/middle/trans/inline.rs @@ -103,9 +103,9 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id, } local_def(mth.id) } - csearch::found(ast::ii_dtor(dtor, _, _, _)) => { - ccx.external.insert(fn_id, Some(dtor.node.id)); - local_def(dtor.node.id) + csearch::found(ast::ii_dtor(ref dtor, _, _, _)) => { + ccx.external.insert(fn_id, Some((*dtor).node.id)); + local_def((*dtor).node.id) } } } diff --git a/src/librustc/middle/trans/monomorphize.rs b/src/librustc/middle/trans/monomorphize.rs index 5ecd33c9083..86c571086c6 100644 --- a/src/librustc/middle/trans/monomorphize.rs +++ b/src/librustc/middle/trans/monomorphize.rs @@ 
-73,7 +73,7 @@ fn monomorphic_fn(ccx: @crate_ctxt, // Get the path so that we can create a symbol let (pt, name, span) = match map_node { ast_map::node_item(i, pt) => (pt, i.ident, i.span), - ast_map::node_variant(v, enm, pt) => (pt, v.node.name, enm.span), + ast_map::node_variant(ref v, enm, pt) => (pt, (*v).node.name, enm.span), ast_map::node_method(m, _, pt) => (pt, m.ident, m.span), ast_map::node_foreign_item(i, ast::foreign_abi_rust_intrinsic, pt) => (pt, i.ident, i.span), @@ -152,10 +152,10 @@ fn monomorphic_fn(ccx: @crate_ctxt, }); let lldecl = match map_node { - ast_map::node_item(i@@{node: ast::item_fn(decl, _, _, body), _}, _) => { + ast_map::node_item(i@@{node: ast::item_fn(decl, _, _, ref body), _}, _) => { let d = mk_lldecl(); set_inline_hint_if_appr(i.attrs, d); - trans_fn(ccx, pt, decl, body, d, no_self, psubsts, fn_id.node, None); + trans_fn(ccx, pt, decl, (*body), d, no_self, psubsts, fn_id.node, None); d } ast_map::node_item(*) => { @@ -167,15 +167,15 @@ fn monomorphic_fn(ccx: @crate_ctxt, ref_id); d } - ast_map::node_variant(v, enum_item, _) => { + ast_map::node_variant(ref v, enum_item, _) => { let tvs = ty::enum_variants(ccx.tcx, local_def(enum_item.id)); let this_tv = option::get(vec::find(*tvs, |tv| { tv.id.node == fn_id.node})); let d = mk_lldecl(); set_inline_hint(d); - match v.node.kind { + match (*v).node.kind { ast::tuple_variant_kind(args) => { - trans_enum_variant(ccx, enum_item.id, v, args, + trans_enum_variant(ccx, enum_item.id, (*v), args, this_tv.disr_val, (*tvs).len() == 1u, psubsts, d); } diff --git a/src/librustc/middle/trans/reachable.rs b/src/librustc/middle/trans/reachable.rs index d2208ccb859..c5e6753a97d 100644 --- a/src/librustc/middle/trans/reachable.rs +++ b/src/librustc/middle/trans/reachable.rs @@ -45,8 +45,8 @@ fn find_reachable(crate_mod: _mod, exp_map2: resolve::ExportMap2, fn traverse_exports(cx: ctx, mod_id: node_id) -> bool { let mut found_export = false; match cx.exp_map2.find(mod_id) { - Some(exp2s) => { - for exp2s.each |e2| { + Some(ref exp2s) => { + for (*exp2s).each |e2| { found_export = true; traverse_def_id(cx, e2.def_id) }; @@ -60,7 +60,7 @@ fn traverse_def_id(cx: ctx, did: def_id) { if did.crate != local_crate { return; } let n = match cx.tcx.items.find(did.node) { None => return, // This can happen for self, for example - Some(n) => n + Some(ref n) => (*n) }; match n { ast_map::node_item(item, _) => traverse_public_item(cx, item), @@ -68,7 +68,7 @@ fn traverse_def_id(cx: ctx, did: def_id) { ast_map::node_foreign_item(item, _, _) => { cx.rmap.insert(item.id, ()); } - ast_map::node_variant(v, _, _) => { cx.rmap.insert(v.node.id, ()); } + ast_map::node_variant(ref v, _, _) => { cx.rmap.insert((*v).node.id, ()); } _ => () } } @@ -94,10 +94,10 @@ fn traverse_public_item(cx: ctx, item: @item) { } } } - item_fn(_, _, tps, blk) => { + item_fn(_, _, tps, ref blk) => { if tps.len() > 0u || attr::find_inline_attr(item.attrs) != attr::ia_none { - traverse_inline_body(cx, blk); + traverse_inline_body(cx, (*blk)); } } item_impl(tps, _, _, ms) => { diff --git a/src/librustc/middle/trans/type_of.rs b/src/librustc/middle/trans/type_of.rs index 777299d9e84..a1dfd383893 100644 --- a/src/librustc/middle/trans/type_of.rs +++ b/src/librustc/middle/trans/type_of.rs @@ -113,13 +113,13 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef { ty::ty_estr(ty::vstore_uniq) => { T_unique_ptr(T_unique(cx, T_vec(cx, T_i8()))) } - ty::ty_enum(did, substs) => { + ty::ty_enum(did, ref substs) => { // Only create the named struct, but don't fill it in. 
We // fill it in *after* placing it into the type cache. This // avoids creating more than one copy of the enum when one // of the enum's variants refers to the enum itself. - common::T_named_struct(llvm_type_name(cx, an_enum, did, substs.tps)) + common::T_named_struct(llvm_type_name(cx, an_enum, did, (*substs).tps)) } ty::ty_estr(ty::vstore_box) => { T_box_ptr(T_box(cx, T_vec(cx, T_i8()))) @@ -179,12 +179,12 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef { T_struct(tys) } ty::ty_opaque_closure_ptr(_) => T_opaque_box_ptr(cx), - ty::ty_class(did, substs) => { + ty::ty_class(did, ref substs) => { // Only create the named struct, but don't fill it in. We fill it // in *after* placing it into the type cache. This prevents // infinite recursion with recursive class types. - common::T_named_struct(llvm_type_name(cx, a_class, did, substs.tps)) + common::T_named_struct(llvm_type_name(cx, a_class, did, (*substs).tps)) } ty::ty_self => cx.tcx.sess.unimpl(~"type_of: ty_self"), ty::ty_infer(*) => cx.tcx.sess.bug(~"type_of with ty_infer"), diff --git a/src/librustc/middle/trans/type_use.rs b/src/librustc/middle/trans/type_use.rs index 7dd1fe833bd..367f10f7645 100644 --- a/src/librustc/middle/trans/type_use.rs +++ b/src/librustc/middle/trans/type_use.rs @@ -80,14 +80,14 @@ fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint) return uses; } let map_node = match ccx.tcx.items.find(fn_id_loc.node) { - Some(x) => x, + Some(ref x) => (*x), None => ccx.sess.bug(fmt!("type_uses_for: unbound item ID %?", fn_id_loc)) }; match map_node { - ast_map::node_item(@{node: item_fn(_, _, _, body), _}, _) | - ast_map::node_method(@{body, _}, _, _) => { - handle_body(cx, body); + ast_map::node_item(@{node: item_fn(_, _, _, ref body), _}, _) | + ast_map::node_method(@{body: ref body, _}, _, _) => { + handle_body(cx, (*body)); } ast_map::node_trait_method(*) => { // This will be a static trait method. For now, we just assume @@ -157,12 +157,12 @@ fn type_needs_inner(cx: ctx, use_: uint, ty: ty::t, */ ty::ty_fn(_) | ty::ty_ptr(_) | ty::ty_rptr(_, _) | ty::ty_trait(_, _, _) => false, - ty::ty_enum(did, substs) => { + ty::ty_enum(did, ref substs) => { if option::is_none(&list::find(enums_seen, |id| *id == did)) { let seen = @Cons(did, enums_seen); for vec::each(*ty::enum_variants(cx.ccx.tcx, did)) |v| { for vec::each(v.args) |aty| { - let t = ty::subst(cx.ccx.tcx, &substs, *aty); + let t = ty::subst(cx.ccx.tcx, &(*substs), *aty); type_needs_inner(cx, use_, t, seen); } } diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 86d8ab68128..d6fd351b2df 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -429,7 +429,10 @@ type ctxt = destructor_for_type: HashMap<ast::def_id, ast::def_id>, // A method will be in this list if and only if it is a destructor. - destructors: HashMap<ast::def_id, ()> + destructors: HashMap<ast::def_id, ()>, + + // Records the value mode (read, copy, or move) for every value. 
+ value_modes: HashMap<ast::node_id, ValueMode>, }; enum tbox_flag { @@ -918,7 +921,7 @@ fn mk_ctxt(s: session::Session, let mut legacy_modes = false; for crate.node.attrs.each |attribute| { match attribute.node.value.node { - ast::meta_word(w) if w == ~"legacy_modes" => { + ast::meta_word(ref w) if (*w) == ~"legacy_modes" => { legacy_modes = true; break; } @@ -968,7 +971,8 @@ fn mk_ctxt(s: session::Session, automatically_derived_methods: HashMap(), automatically_derived_methods_for_impl: HashMap(), destructor_for_type: HashMap(), - destructors: HashMap()} + destructors: HashMap(), + value_modes: HashMap()} } @@ -1182,7 +1186,7 @@ pure fn mach_sty(cfg: @session::config, t: t) -> sty { ty_int(ast::ty_i) => ty_int(cfg.int_type), ty_uint(ast::ty_u) => ty_uint(cfg.uint_type), ty_float(ast::ty_f) => ty_float(cfg.float_type), - s => s + ref s => (*s) } } @@ -1255,9 +1259,9 @@ fn maybe_walk_ty(ty: t, f: fn(t) -> bool) { ty_ptr(tm) | ty_rptr(_, tm) => { maybe_walk_ty(tm.ty, f); } - ty_enum(_, substs) | ty_class(_, substs) | - ty_trait(_, substs, _) => { - for substs.tps.each |subty| { maybe_walk_ty(*subty, f); } + ty_enum(_, ref substs) | ty_class(_, ref substs) | + ty_trait(_, ref substs, _) => { + for (*substs).tps.each |subty| { maybe_walk_ty(*subty, f); } } ty_rec(fields) => { for fields.each |fl| { maybe_walk_ty(fl.mt.ty, f); } @@ -2913,7 +2917,7 @@ fn is_fn_ty(fty: t) -> bool { fn ty_region(ty: t) -> Region { match get(ty).sty { ty_rptr(r, _) => r, - s => fail fmt!("ty_region() invoked on non-rptr: %?", s) + ref s => fail fmt!("ty_region() invoked on non-rptr: %?", (*s)) } } @@ -3246,10 +3250,10 @@ fn canon<T:Copy cmp::Eq>(tbl: HashMap<ast::node_id, ast::inferable<T>>, match m0 { ast::infer(id) => match tbl.find(id) { None => m0, - Some(m1) => { - let cm1 = canon(tbl, m1); + Some(ref m1) => { + let cm1 = canon(tbl, (*m1)); // path compression: - if cm1 != m1 { tbl.insert(id, cm1); } + if cm1 != (*m1) { tbl.insert(id, cm1); } cm1 } }, @@ -3440,11 +3444,11 @@ fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str { but found bound lifetime parameter %s", bound_region_to_str(cx, br)) } - terr_vstores_differ(k, values) => { + terr_vstores_differ(k, ref values) => { fmt!("%s storage differs: expected %s but found %s", terr_vstore_kind_to_str(k), - vstore_to_str(cx, values.expected), - vstore_to_str(cx, values.found)) + vstore_to_str(cx, (*values).expected), + vstore_to_str(cx, (*values).found)) } terr_in_field(err, fname) => { fmt!("in field `%s`, %s", cx.sess.str_of(fname), @@ -3515,8 +3519,8 @@ fn store_trait_methods(cx: ctxt, id: ast::node_id, ms: @~[method]) { fn provided_trait_methods(cx: ctxt, id: ast::def_id) -> ~[ast::ident] { if is_local(id) { match cx.items.find(id.node) { - Some(ast_map::node_item(@{node: item_trait(_, _, ms),_}, _)) => - match ast_util::split_trait_methods(ms) { + Some(ast_map::node_item(@{node: item_trait(_, _, ref ms),_}, _)) => + match ast_util::split_trait_methods((*ms)) { (_, p) => p.map(|method| method.ident) }, _ => cx.sess.bug(fmt!("provided_trait_methods: %? 
is not a trait", @@ -3543,10 +3547,10 @@ fn trait_supertraits(cx: ctxt, id: ast::def_id) -> @~[InstantiatedTraitRef] { let result = dvec::DVec(); for csearch::get_supertraits(cx, id).each |trait_type| { match get(*trait_type).sty { - ty_trait(def_id, substs, _) => { + ty_trait(def_id, ref substs, _) => { result.push(InstantiatedTraitRef { def_id: def_id, - tpt: { substs: substs, ty: *trait_type } + tpt: { substs: (*substs), ty: *trait_type } }); } _ => cx.sess.bug(~"trait_supertraits: trait ref wasn't a trait") @@ -3583,7 +3587,7 @@ fn impl_traits(cx: ctxt, id: ast::def_id, vstore: vstore) -> ~[t] { fn vstoreify(cx: ctxt, ty: t, vstore: vstore) -> t { match ty::get(ty).sty { ty::ty_trait(_, _, trait_vstore) if vstore == trait_vstore => ty, - ty::ty_trait(did, substs, _) => mk_trait(cx, did, substs, vstore), + ty::ty_trait(did, ref substs, _) => mk_trait(cx, did, (*substs), vstore), _ => cx.sess.bug(~"impl_traits: not a trait") } } @@ -3706,10 +3710,10 @@ fn ty_dtor(cx: ctxt, class_id: def_id) -> DtorKind { if is_local(class_id) { match cx.items.find(class_id.node) { Some(ast_map::node_item(@{ - node: ast::item_class(@{ dtor: Some(dtor), _ }, _), + node: ast::item_class(@{ dtor: Some(ref dtor), _ }, _), _ }, _)) => - LegacyDtor(local_def(dtor.node.id)), + LegacyDtor(local_def((*dtor).node.id)), _ => NoDtor } @@ -3756,9 +3760,9 @@ fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path { vec::append_one(*path, ast_map::path_name(method.ident)) } - ast_map::node_variant(variant, _, path) => { + ast_map::node_variant(ref variant, _, path) => { vec::append_one(vec::init(*path), - ast_map::path_name(variant.node.name)) + ast_map::path_name((*variant).node.name)) } ast_map::node_dtor(_, _, _, path) => { @@ -3805,9 +3809,9 @@ fn enum_variants(cx: ctxt, id: ast::def_id) -> @~[VariantInfo] { expr, since check_enum_variants also updates the enum_var_cache */ match cx.items.get(id.node) { - ast_map::node_item(@{node: ast::item_enum(enum_definition, _), _}, + ast_map::node_item(@{node: ast::item_enum(ref enum_definition, _), _}, _) => { - let variants = enum_definition.variants; + let variants = (*enum_definition).variants; let mut disr_val = -1; @vec::map(variants, |variant| { match variant.node.kind { @@ -3921,8 +3925,8 @@ fn lookup_class_fields(cx: ctxt, did: ast::def_id) -> ~[field_ty] { _ => cx.sess.bug(~"class ID bound to non-class") } } - Some(ast_map::node_variant(variant, _, _)) => { - match variant.node.kind { + Some(ast_map::node_variant(ref variant, _, _)) => { + match (*variant).node.kind { ast::struct_variant_kind(struct_def) => { class_field_tys(struct_def.fields) } @@ -4180,24 +4184,24 @@ fn normalize_ty(cx: ctxt, t: t) -> t { }) } - ty_enum(did, r) => - match r.self_r { + ty_enum(did, ref r) => + match (*r).self_r { Some(_) => // Use re_static since trans doesn't care about regions mk_enum(cx, did, {self_r: Some(ty::re_static), self_ty: None, - tps: r.tps}), + tps: (*r).tps}), None => t }, - ty_class(did, r) => - match r.self_r { + ty_class(did, ref r) => + match (*r).self_r { Some(_) => // Ditto. 
mk_class(cx, did, {self_r: Some(ty::re_static), self_ty: None, - tps: r.tps}), + tps: (*r).tps}), None => t }, @@ -4544,9 +4548,9 @@ impl sty : cmp::Eq { _ => false } } - ty_enum(e0a, e1a) => { + ty_enum(e0a, ref e1a) => { match (*other) { - ty_enum(e0b, e1b) => e0a == e0b && e1a == e1b, + ty_enum(e0b, ref e1b) => e0a == e0b && (*e1a) == (*e1b), _ => false } } @@ -4586,22 +4590,22 @@ impl sty : cmp::Eq { _ => false } } - ty_fn(e0a) => { + ty_fn(ref e0a) => { match (*other) { - ty_fn(e0b) => e0a == e0b, + ty_fn(ref e0b) => (*e0a) == (*e0b), _ => false } } - ty_trait(e0a, e1a, e2a) => { + ty_trait(e0a, ref e1a, e2a) => { match (*other) { - ty_trait(e0b, e1b, e2b) => - e0a == e0b && e1a == e1b && e2a == e2b, + ty_trait(e0b, ref e1b, e2b) => + e0a == e0b && (*e1a) == (*e1b) && e2a == e2b, _ => false } } - ty_class(e0a, e1a) => { + ty_class(e0a, ref e1a) => { match (*other) { - ty_class(e0b, e1b) => e0a == e0b && e1a == e1b, + ty_class(e0b, ref e1b) => e0a == e0b && (*e1a) == (*e1b), _ => false } } diff --git a/src/librustc/middle/typeck/astconv.rs b/src/librustc/middle/typeck/astconv.rs index 91e92810d25..34a5deea3e2 100644 --- a/src/librustc/middle/typeck/astconv.rs +++ b/src/librustc/middle/typeck/astconv.rs @@ -72,8 +72,8 @@ fn get_region_reporting_err(tcx: ty::ctxt, match res { result::Ok(r) => r, - result::Err(e) => { - tcx.sess.span_err(span, e); + result::Err(ref e) => { + tcx.sess.span_err(span, (*e)); ty::re_static } } @@ -204,7 +204,7 @@ fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Owned>( self, rscope, type_def_id, path); match ty::get(result.ty).sty { - ty::ty_trait(trait_def_id, substs, _) => { + ty::ty_trait(trait_def_id, ref substs, _) => { match vst { ty::vstore_box | ty::vstore_slice(*) | ty::vstore_uniq => {} @@ -218,7 +218,7 @@ fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Owned>( } } return ty::mk_trait(tcx, trait_def_id, - substs, vst); + (*substs), vst); } _ => {} @@ -297,8 +297,8 @@ fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Owned>( let flds = vec::map(fields, |t| ast_ty_to_ty(self, rscope, *t)); ty::mk_tup(tcx, flds) } - ast::ty_rec(fields) => { - let flds = do fields.map |f| { + ast::ty_rec(ref fields) => { + let flds = do (*fields).map |f| { let tm = ast_mt_to_mt(self, rscope, f.node.mt); {ident: f.node.ident, mt: tm} }; diff --git a/src/librustc/middle/typeck/check/method.rs b/src/librustc/middle/typeck/check/method.rs index 7930f215317..cb8eb07aafc 100644 --- a/src/librustc/middle/typeck/check/method.rs +++ b/src/librustc/middle/typeck/check/method.rs @@ -347,7 +347,7 @@ impl LookupContext { let bound_substs = match ty::get(bound_trait_ty).sty { - ty::ty_trait(_, substs, _) => substs, + ty::ty_trait(_, ref substs, _) => (*substs), _ => { self.bug(fmt!("add_candidates_from_param: \ non-trait bound %s", @@ -882,10 +882,10 @@ impl LookupContext { candidate_a, candidate_b); let candidates_same = match (&candidate_a.origin, &candidate_b.origin) { - (&method_param(p1), &method_param(p2)) => { - let same_trait = p1.trait_id == p2.trait_id; - let same_method = p1.method_num == p2.method_num; - let same_param = p1.param_num == p2.param_num; + (&method_param(ref p1), &method_param(ref p2)) => { + let same_trait = (*p1).trait_id == (*p2).trait_id; + let same_method = (*p1).method_num == (*p2).method_num; + let same_param = (*p1).param_num == (*p2).param_num; // The bound number may be different because // multiple bounds may lead to the same trait // impl @@ -1059,8 +1059,8 @@ impl LookupContext { method_static(impl_did) => { 
self.report_static_candidate(idx, impl_did) } - method_param(mp) => { - self.report_param_candidate(idx, mp.trait_id) + method_param(ref mp) => { + self.report_param_candidate(idx, (*mp).trait_id) } method_trait(trait_did, _, _) | method_self(trait_did, _) => { self.report_param_candidate(idx, trait_did) diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index bfeb8a50094..d28f871c715 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -230,7 +230,7 @@ fn check_fn(ccx: @crate_ctxt, // types with free ones. The free region references will be bound // the node_id of the body block. - let {isr, self_info, fn_ty} = { + let {isr: isr, self_info: self_info, fn_ty: fn_ty} = { let old_isr = option::map_default(&old_fcx, @Nil, |fcx| fcx.in_scope_regions); replace_bound_regions_in_fn_ty(tcx, old_isr, self_info, fn_ty, @@ -495,11 +495,11 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) { match it.node { ast::item_const(_, e) => check_const(ccx, it.span, e, it.id), - ast::item_enum(enum_definition, _) => { - check_enum_variants(ccx, it.span, enum_definition.variants, it.id); + ast::item_enum(ref enum_definition, _) => { + check_enum_variants(ccx, it.span, (*enum_definition).variants, it.id); } - ast::item_fn(decl, _, _, body) => { - check_bare_fn(ccx, decl, body, it.id, None); + ast::item_fn(decl, _, _, ref body) => { + check_bare_fn(ccx, decl, (*body), it.id, None); } ast::item_impl(_, _, ty, ms) => { let rp = ccx.tcx.region_paramd_items.find(it.id); @@ -510,8 +510,8 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) { check_method(ccx, *m, self_ty, local_def(it.id)); } } - ast::item_trait(_, _, trait_methods) => { - for trait_methods.each |trait_method| { + ast::item_trait(_, _, ref trait_methods) => { + for (*trait_methods).each |trait_method| { match *trait_method { required(*) => { // Nothing to do, since required methods don't have @@ -531,8 +531,8 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) { check_bounds_are_used(ccx, t.span, tps, tpt_ty); // If this is a record ty, check for duplicate fields match t.node { - ast::ty_rec(fields) => { - check_no_duplicate_fields(ccx.tcx, fields.map(|f| + ast::ty_rec(ref fields) => { + check_no_duplicate_fields(ccx.tcx, (*fields).map(|f| (f.node.ident, f.span))); } _ => () @@ -690,7 +690,7 @@ impl @fn_ctxt { } fn node_ty_substs(id: ast::node_id) -> ty::substs { match self.inh.node_type_substs.find(id) { - Some(ts) => ts, + Some(ref ts) => (*ts), None => { self.tcx().sess.bug( fmt!("no type substs for node %d: %s in fcx %s", @@ -720,7 +720,7 @@ impl @fn_ctxt { { match infer::mk_assignty(self.infcx(), false, expr.span, sub, sup) { Ok(None) => result::Ok(()), - Err(e) => result::Err(e), + Err(ref e) => result::Err((*e)), Ok(Some(adjustment)) => { self.write_adjustment(expr.id, adjustment); Ok(()) @@ -1191,8 +1191,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr_t, tps, DontDerefArgs) { - Some(entry) => { - fcx.ccx.method_map.insert(expr.id, entry); + Some(ref entry) => { + fcx.ccx.method_map.insert(expr.id, (*entry)); } None => { fcx.type_error_message(expr.span, @@ -1267,13 +1267,13 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, match method::lookup(fcx, op_ex, self_ex, op_ex.callee_id, opname, self_t, ~[], deref_args) { - Some(origin) => { + Some(ref origin) => { let {fty: method_ty, bot: bot} = { let method_ty = fcx.node_ty(op_ex.callee_id); check_call_inner(fcx, op_ex.span, op_ex.id, method_ty, op_ex, args, deref_args) }; - fcx.ccx.method_map.insert(op_ex.id, 
origin); + fcx.ccx.method_map.insert(op_ex.id, (*origin)); Some((ty::ty_fn_ret(method_ty), bot)) } _ => None @@ -1341,9 +1341,9 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, { let tcx = fcx.ccx.tcx; match ast_util::binop_to_method_name(op) { - Some(name) => { + Some(ref name) => { match lookup_op_method(fcx, ex, lhs_expr, lhs_resolved_t, - fcx.tcx().sess.ident_of(name), + fcx.tcx().sess.ident_of((*name)), ~[rhs], DoDerefArgs) { Some(pair) => return pair, _ => () @@ -1434,7 +1434,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, match expected_sty { Some(ty::ty_fn(ref fn_ty)) => { let id = expr.id; - let {fn_ty, _} = + let {fn_ty: fn_ty, _} = replace_bound_regions_in_fn_ty( tcx, @Nil, None, fn_ty, |br| ty::re_bound(ty::br_cap_avoid(id, @br))); @@ -1506,7 +1506,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, _ => () } } - ty::ty_class(base_id, substs) => { + ty::ty_class(base_id, ref substs) => { // This is just for fields -- the same code handles // methods in both classes and traits @@ -1515,7 +1515,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, debug!("class named %s", ty_to_str(tcx, base_t)); let cls_items = ty::lookup_class_fields(tcx, base_id); match lookup_field_ty(tcx, base_id, cls_items, - field, &substs) { + field, &(*substs)) { Some(field_ty) => { // (2) look up what field's type is, and return it fcx.write_ty(expr.id, field_ty); @@ -1532,8 +1532,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, match method::lookup(fcx, expr, base, expr.id, field, expr_t, tps, DontDerefArgs) { - Some(entry) => { - fcx.ccx.method_map.insert(expr.id, entry); + Some(ref entry) => { + fcx.ccx.method_map.insert(expr.id, (*entry)); // If we have resolved to a method but this is not in // a callee position, error @@ -2020,32 +2020,32 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, ast::expr_swap(lhs, rhs) => { bot = check_assignment(fcx, lhs, rhs, id); } - ast::expr_if(cond, thn, elsopt) => { + ast::expr_if(cond, ref thn, elsopt) => { bot = check_expr_with(fcx, cond, ty::mk_bool(tcx)) | - check_then_else(fcx, thn, elsopt, id, expr.span); + check_then_else(fcx, (*thn), elsopt, id, expr.span); } - ast::expr_while(cond, body) => { + ast::expr_while(cond, ref body) => { bot = check_expr_with(fcx, cond, ty::mk_bool(tcx)); - check_block_no_value(fcx, body); + check_block_no_value(fcx, (*body)); fcx.write_ty(id, ty::mk_nil(tcx)); } - ast::expr_loop(body, _) => { - check_block_no_value(fcx, body); + ast::expr_loop(ref body, _) => { + check_block_no_value(fcx, (*body)); fcx.write_ty(id, ty::mk_nil(tcx)); - bot = !may_break(tcx, expr.id, body); + bot = !may_break(tcx, expr.id, (*body)); } - ast::expr_match(discrim, arms) => { - bot = alt::check_alt(fcx, expr, discrim, arms); + ast::expr_match(discrim, ref arms) => { + bot = alt::check_alt(fcx, expr, discrim, (*arms)); } - ast::expr_fn(proto, decl, body, cap_clause) => { + ast::expr_fn(proto, decl, ref body, cap_clause) => { check_expr_fn(fcx, expr, Some(proto), - decl, body, false, + decl, (*body), false, expected); capture::check_capture_clause(tcx, expr.id, cap_clause); } - ast::expr_fn_block(decl, body, cap_clause) => { + ast::expr_fn_block(decl, ref body, cap_clause) => { check_expr_fn(fcx, expr, None, - decl, body, false, + decl, (*body), false, expected); capture::check_capture_clause(tcx, expr.id, cap_clause); } @@ -2058,9 +2058,9 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, // 2. 
the closure that was given returns unit let expected_sty = unpack_expected(fcx, expected, |x| Some(x)); let inner_ty = match expected_sty { - Some(ty::ty_fn(fty)) => { + Some(ty::ty_fn(ref fty)) => { match fcx.mk_subty(false, expr.span, - fty.sig.output, ty::mk_bool(tcx)) { + (*fty).sig.output, ty::mk_bool(tcx)) { result::Ok(_) => (), result::Err(_) => { fcx.type_error_message(expr.span, @@ -2068,15 +2068,15 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, fmt!("a `loop` function's last argument \ should return `bool`, not `%s`", actual) }, - fty.sig.output, None); + (*fty).sig.output, None); fcx.write_ty(id, ty::mk_err(tcx)); return true; } } ty::mk_fn(tcx, FnTyBase { - meta: fty.meta, + meta: (*fty).meta, sig: FnSig {output: ty::mk_nil(tcx), - ..fty.sig} + ..(*fty).sig} }) } _ => @@ -2097,9 +2097,9 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, } }; match b.node { - ast::expr_fn_block(decl, body, cap_clause) => { + ast::expr_fn_block(decl, ref body, cap_clause) => { check_expr_fn(fcx, b, None, - decl, body, true, + decl, (*body), true, Some(inner_ty)); demand::suptype(fcx, b.span, inner_ty, fcx.expr_ty(b)); capture::check_capture_clause(tcx, b.id, cap_clause); @@ -2110,11 +2110,11 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, let block_ty = structurally_resolved_type( fcx, expr.span, fcx.node_ty(b.id)); match ty::get(block_ty).sty { - ty::ty_fn(fty) => { + ty::ty_fn(ref fty) => { fcx.write_ty(expr.id, ty::mk_fn(tcx, FnTyBase { - meta: fty.meta, + meta: (*fty).meta, sig: FnSig {output: ty::mk_bool(tcx), - ..fty.sig} + ..(*fty).sig} })) } _ => fail ~"expected fn type" @@ -2123,8 +2123,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, ast::expr_do_body(b) => { let expected_sty = unpack_expected(fcx, expected, |x| Some(x)); let inner_ty = match expected_sty { - Some(ty::ty_fn(fty)) => { - ty::mk_fn(tcx, fty) + Some(ty::ty_fn(ref fty)) => { + ty::mk_fn(tcx, (*fty)) } _ => match expected { Some(expected_t) => { @@ -2141,9 +2141,9 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, } }; match b.node { - ast::expr_fn_block(decl, body, cap_clause) => { + ast::expr_fn_block(decl, ref body, cap_clause) => { check_expr_fn(fcx, b, None, - decl, body, true, + decl, (*body), true, Some(inner_ty)); demand::suptype(fcx, b.span, inner_ty, fcx.expr_ty(b)); capture::check_capture_clause(tcx, b.id, cap_clause); @@ -2154,17 +2154,17 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, let block_ty = structurally_resolved_type( fcx, expr.span, fcx.node_ty(b.id)); match ty::get(block_ty).sty { - ty::ty_fn(fty) => { - fcx.write_ty(expr.id, ty::mk_fn(tcx, fty)); + ty::ty_fn(ref fty) => { + fcx.write_ty(expr.id, ty::mk_fn(tcx, (*fty))); } _ => fail ~"expected fn ty" } } - ast::expr_block(b) => { + ast::expr_block(ref b) => { // If this is an unchecked block, turn off purity-checking - bot = check_block(fcx, b); + bot = check_block(fcx, (*b)); let typ = - match b.node.expr { + match (*b).node.expr { Some(expr) => fcx.expr_ty(expr), None => ty::mk_nil(tcx) }; @@ -2246,7 +2246,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, let typ = ty::mk_tup(tcx, elt_ts); fcx.write_ty(id, typ); } - ast::expr_rec(fields, base) => { + ast::expr_rec(ref fields, base) => { option::iter(&base, |b| { check_expr(fcx, *b, expected); }); let expected = if expected.is_none() && base.is_some() { Some(fcx.expr_ty(base.get())) @@ -2254,7 +2254,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, let flds = unpack_expected(fcx, expected, |sty| match sty { ty::ty_rec(flds) => Some(flds), _ => None } ); - let fields_t = vec::map(fields, |f| { + let fields_t = 
vec::map((*fields), |f| { bot |= check_expr(fcx, f.node.expr, flds.chain_ref(|flds| vec::find(*flds, |tf| tf.ident == f.node.ident) ).map(|tf| tf.mt.ty)); @@ -2274,7 +2274,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, that we're extending a record we know has no dup fields, and it would be ill-typed anyway if we duplicated one of its fields */ - check_no_duplicate_fields(tcx, fields.map(|f| + check_no_duplicate_fields(tcx, (*fields).map(|f| (f.node.ident, f.span))); } Some(bexpr) => { @@ -2309,16 +2309,16 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, } } } - ast::expr_struct(path, fields, base_expr) => { + ast::expr_struct(path, ref fields, base_expr) => { // Resolve the path. match tcx.def_map.find(id) { Some(ast::def_class(type_def_id)) => { check_struct_constructor(fcx, id, expr.span, type_def_id, - fields, base_expr); + (*fields), base_expr); } Some(ast::def_variant(enum_id, variant_id)) => { check_struct_enum_variant(fcx, id, expr.span, enum_id, - variant_id, fields); + variant_id, (*fields)); } _ => { tcx.sess.span_bug(path.span, ~"structure constructor does \ @@ -2571,9 +2571,9 @@ fn check_enum_variants(ccx: @crate_ctxt, ccx.tcx.sess.span_err(e.span, ~"expected signed integer \ constant"); } - Err(err) => { + Err(ref err) => { ccx.tcx.sess.span_err(e.span, - fmt!("expected constant: %s", err)); + fmt!("expected constant: %s", (*err))); } } @@ -2977,9 +2977,9 @@ fn check_intrinsic_type(ccx: @crate_ctxt, it: @ast::foreign_item) { ~"morestack_addr" => { (0u, ~[], ty::mk_nil_ptr(tcx)) } - other => { + ref other => { tcx.sess.span_err(it.span, ~"unrecognized intrinsic function: `" + - other + ~"`"); + (*other) + ~"`"); return; } }; diff --git a/src/librustc/middle/typeck/check/regionmanip.rs b/src/librustc/middle/typeck/check/regionmanip.rs index 4392864bd35..8431c175eff 100644 --- a/src/librustc/middle/typeck/check/regionmanip.rs +++ b/src/librustc/middle/typeck/check/regionmanip.rs @@ -26,7 +26,7 @@ fn replace_bound_regions_in_fn_ty( // Take self_info apart; the self_ty part is the only one we want // to update here. let (self_ty, rebuild_self_info) = match self_info { - Some(s) => (Some(s.self_ty), |t| Some({self_ty: t,.. s})), + Some(copy s) => (Some(s.self_ty), |t| Some({self_ty: t,.. 
s})), None => (None, |_t| None) }; @@ -76,7 +76,7 @@ fn replace_bound_regions_in_fn_ty( return {isr: isr, self_info: new_self_info, - fn_ty: match ty::get(t_fn).sty { ty::ty_fn(o) => o, + fn_ty: match ty::get(t_fn).sty { ty::ty_fn(ref o) => (*o), _ => tcx.sess.bug(~"replace_bound_regions_in_fn_ty: impossible")}}; diff --git a/src/librustc/middle/typeck/check/vtable.rs b/src/librustc/middle/typeck/check/vtable.rs index b05bc5162db..0494f1563cf 100644 --- a/src/librustc/middle/typeck/check/vtable.rs +++ b/src/librustc/middle/typeck/check/vtable.rs @@ -125,7 +125,7 @@ fn fixup_substs(vcx: &VtableContext, location_info: &LocationInfo, let t = ty::mk_trait(tcx, id, substs, ty::vstore_slice(ty::re_static)); do fixup_ty(vcx, location_info, t, is_early).map |t_f| { match ty::get(*t_f).sty { - ty::ty_trait(_, substs_f, _) => substs_f, + ty::ty_trait(_, ref substs_f, _) => (*substs_f), _ => fail ~"t_f should be a trait" } } @@ -151,7 +151,7 @@ fn lookup_vtable(vcx: &VtableContext, let tcx = vcx.tcx(); let (trait_id, trait_substs, trait_vstore) = match ty::get(trait_ty).sty { - ty::ty_trait(did, substs, vstore) => (did, substs, vstore), + ty::ty_trait(did, ref substs, vstore) => (did, (*substs), vstore), _ => tcx.sess.impossible_case(location_info.span, "lookup_vtable: \ don't know how to handle a non-trait") @@ -203,7 +203,7 @@ fn lookup_vtable(vcx: &VtableContext, } } - ty::ty_trait(did, substs, _) if trait_id == did => { + ty::ty_trait(did, ref substs, _) if trait_id == did => { debug!("(checking vtable) @1 relating ty to trait ty with did %?", did); @@ -224,7 +224,7 @@ fn lookup_vtable(vcx: &VtableContext, } } } - return Some(vtable_trait(did, substs.tps)); + return Some(vtable_trait(did, (*substs).tps)); } _ => { @@ -357,7 +357,7 @@ fn lookup_vtable(vcx: &VtableContext, trait_id, substs, is_early) { - Some(substs) => substs, + Some(ref substs) => (*substs), None => { assert is_early; // Bail out with a bogus answer @@ -468,8 +468,8 @@ fn connect_trait_tps(vcx: &VtableContext, debug!("(connect trait tps) trait type is %?, impl did is %?", ty::get(trait_ty).sty, impl_did); match ty::get(trait_ty).sty { - ty::ty_trait(_, substs, _) => { - for vec::each2(substs.tps, trait_tys) |a, b| { + ty::ty_trait(_, ref substs, _) => { + for vec::each2((*substs).tps, trait_tys) |a, b| { demand_suptype(vcx, location_info.span, *a, *b); } } diff --git a/src/librustc/middle/typeck/check/writeback.rs b/src/librustc/middle/typeck/check/writeback.rs index 76519e81b84..12d8f06a3f6 100644 --- a/src/librustc/middle/typeck/check/writeback.rs +++ b/src/librustc/middle/typeck/check/writeback.rs @@ -100,9 +100,9 @@ fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id) id, ty_to_str(tcx, n_ty), ty_to_str(tcx, t)); write_ty_to_tcx(tcx, id, t); match fcx.opt_node_ty_substs(id) { - Some(substs) => { + Some(ref substs) => { let mut new_tps = ~[]; - for substs.tps.each |subst| { + for (*substs).tps.each |subst| { match resolve_type_vars_in_type(fcx, sp, *subst) { Some(t) => new_tps.push(t), None => { wbcx.success = false; return None; } diff --git a/src/librustc/middle/typeck/coherence.rs b/src/librustc/middle/typeck/coherence.rs index 708fda07037..c1517d11f73 100644 --- a/src/librustc/middle/typeck/coherence.rs +++ b/src/librustc/middle/typeck/coherence.rs @@ -627,8 +627,8 @@ impl CoherenceChecker { match self.crate_context.tcx.items.find(method_def_id.node) { Some(ast_map::node_trait_method(trait_method, _, _)) => { match *trait_method { - ast::required(ty_method) => { - 
attr::attrs_contains_name(ty_method.attrs, + ast::required(ref ty_method) => { + attr::attrs_contains_name((*ty_method).attrs, ~"derivable") } ast::provided(method) => { @@ -1028,8 +1028,8 @@ impl CoherenceChecker { // Destructors only work on nominal types. if impl_info.did.crate == ast::local_crate { match tcx.items.find(impl_info.did.node) { - Some(ast_map::node_item(@item, _)) => { - tcx.sess.span_err(item.span, + Some(ast_map::node_item(@ref item, _)) => { + tcx.sess.span_err((*item).span, ~"the Drop trait may only \ be implemented on \ structures"); diff --git a/src/librustc/middle/typeck/collect.rs b/src/librustc/middle/typeck/collect.rs index 00f6eada345..c29ec88957b 100644 --- a/src/librustc/middle/typeck/collect.rs +++ b/src/librustc/middle/typeck/collect.rs @@ -108,9 +108,9 @@ impl @crate_ctxt: ast_conv { Some(ast_map::node_foreign_item(foreign_item, _, _)) => { ty_of_foreign_item(self, foreign_item) } - x => { + ref x => { self.tcx.sess.bug(fmt!("unexpected sort of item \ - in get_item_ty(): %?", x)); + in get_item_ty(): %?", (*x))); } } } @@ -164,8 +164,8 @@ fn get_enum_variant_types(ccx: @crate_ctxt, convert_struct( ccx, rp, struct_def, ty_params, tpt, variant.node.id); } - ast::enum_variant_kind(enum_definition) => { - get_enum_variant_types(ccx, enum_ty, enum_definition.variants, + ast::enum_variant_kind(ref enum_definition) => { + get_enum_variant_types(ccx, enum_ty, (*enum_definition).variants, ty_params, rp); result_ty = None; } @@ -232,11 +232,11 @@ fn ensure_trait_methods(ccx: @crate_ctxt, id: ast::node_id, trait_ty: ty::t) { let tcx = ccx.tcx; let region_paramd = tcx.region_paramd_items.find(id); match tcx.items.get(id) { - ast_map::node_item(@{node: ast::item_trait(params, _, ms), _}, _) => { - store_methods::<ast::trait_method>(ccx, id, ms, |m| { + ast_map::node_item(@{node: ast::item_trait(params, _, ref ms), _}, _) => { + store_methods::<ast::trait_method>(ccx, id, (*ms), |m| { let def_id; match *m { - ast::required(ty_method) => def_id = local_def(ty_method.id), + ast::required(ref ty_method) => def_id = local_def((*ty_method).id), ast::provided(method) => def_id = local_def(method.id) } @@ -550,10 +550,10 @@ fn convert(ccx: @crate_ctxt, it: @ast::item) { match it.node { // These don't define types. 
ast::item_foreign_mod(_) | ast::item_mod(_) => {} - ast::item_enum(enum_definition, ty_params) => { + ast::item_enum(ref enum_definition, ty_params) => { let tpt = ty_of_item(ccx, it); write_ty_to_tcx(tcx, it.id, tpt.ty); - get_enum_variant_types(ccx, tpt.ty, enum_definition.variants, + get_enum_variant_types(ccx, tpt.ty, (*enum_definition).variants, ty_params, rp); } ast::item_impl(tps, trait_ref, selfty, ms) => { @@ -570,7 +570,7 @@ fn convert(ccx: @crate_ctxt, it: @ast::item) { check_methods_against_trait(ccx, tps, rp, selfty, *t, cms); } } - ast::item_trait(tps, supertraits, trait_methods) => { + ast::item_trait(tps, supertraits, ref trait_methods) => { let tpt = ty_of_item(ccx, it); debug!("item_trait(it.id=%d, tpt.ty=%s)", it.id, ty_to_str(tcx, tpt.ty)); @@ -578,7 +578,7 @@ fn convert(ccx: @crate_ctxt, it: @ast::item) { ensure_trait_methods(ccx, it.id, tpt.ty); ensure_supertraits(ccx, it.id, it.span, rp, supertraits); - let (_, provided_methods) = split_trait_methods(trait_methods); + let (_, provided_methods) = split_trait_methods((*trait_methods)); let {bounds, _} = mk_substs(ccx, tps, rp); let _cms = convert_methods(ccx, provided_methods, rp, bounds); // FIXME (#2616): something like this, when we start having @@ -634,7 +634,7 @@ fn convert_struct(ccx: @crate_ctxt, for struct_def.fields.each |f| { convert_field(ccx, rp, tpt.bounds, *f); } - let {bounds, substs} = mk_substs(ccx, tps, rp); + let {bounds: bounds, substs: substs} = mk_substs(ccx, tps, rp); let selfty = ty::mk_class(tcx, local_def(id), substs); let cms = convert_methods(ccx, struct_def.methods, rp, bounds); for struct_def.traits.each |trait_ref| { @@ -813,7 +813,7 @@ fn ty_of_item(ccx: @crate_ctxt, it: @ast::item) } ast::item_enum(_, tps) => { // Create a new generic polytype. - let {bounds, substs} = mk_substs(ccx, tps, rp); + let {bounds: bounds, substs: substs} = mk_substs(ccx, tps, rp); let t = ty::mk_enum(tcx, local_def(it.id), substs); let tpt = {bounds: bounds, region_param: rp, @@ -822,7 +822,7 @@ fn ty_of_item(ccx: @crate_ctxt, it: @ast::item) return tpt; } ast::item_trait(tps, _, _) => { - let {bounds, substs} = mk_substs(ccx, tps, rp); + let {bounds: bounds, substs: substs} = mk_substs(ccx, tps, rp); let t = ty::mk_trait(tcx, local_def(it.id), substs, ty::vstore_box); let tpt = {bounds: bounds, region_param: rp, @@ -831,7 +831,7 @@ fn ty_of_item(ccx: @crate_ctxt, it: @ast::item) return tpt; } ast::item_class(_, tps) => { - let {bounds,substs} = mk_substs(ccx, tps, rp); + let {bounds: bounds, substs: substs} = mk_substs(ccx, tps, rp); let t = ty::mk_class(tcx, local_def(it.id), substs); let tpt = {bounds: bounds, region_param: rp, diff --git a/src/librustc/middle/typeck/infer/assignment.rs b/src/librustc/middle/typeck/infer/assignment.rs index 748d6550de3..2ceff545eb7 100644 --- a/src/librustc/middle/typeck/infer/assignment.rs +++ b/src/librustc/middle/typeck/infer/assignment.rs @@ -64,7 +64,7 @@ use combine::combine_fields; fn to_ares(+c: cres<ty::t>) -> ares { match c { Ok(_) => Ok(None), - Err(e) => Err(e) + Err(ref e) => Err((*e)) } } @@ -198,7 +198,7 @@ priv impl Assign { (ty::ty_rptr(_, ref a_t), ty::ty_ptr(ref b_t)) => { match Sub(*self).mts(*a_t, *b_t) { Ok(_) => Ok(None), - Err(e) => Err(e) + Err(ref e) => Err((*e)) } } diff --git a/src/librustc/middle/typeck/infer/mod.rs b/src/librustc/middle/typeck/infer/mod.rs index 2c5730d61ca..0d0a1d761cc 100644 --- a/src/librustc/middle/typeck/infer/mod.rs +++ b/src/librustc/middle/typeck/infer/mod.rs @@ -521,8 +521,8 @@ trait ToUres { impl<T> cres<T>: ToUres { 
fn to_ures() -> ures { match self { - Ok(_v) => Ok(()), - Err(e) => Err(e) + Ok(ref _v) => Ok(()), + Err(ref e) => Err((*e)) } } } @@ -761,7 +761,7 @@ impl infer_ctxt { &self, span: span, fty: &ty::FnTy) -> (ty::FnTy, isr_alist) { - let {fn_ty, isr, _} = + let {fn_ty: fn_ty, isr: isr, _} = replace_bound_regions_in_fn_ty(self.tcx, @Nil, None, fty, |br| { // N.B.: The name of the bound region doesn't have anything to // do with the region variable that's created for it. The diff --git a/src/librustc/middle/typeck/infer/region_inference.rs b/src/librustc/middle/typeck/infer/region_inference.rs index 01caac22f23..f49d1740018 100644 --- a/src/librustc/middle/typeck/infer/region_inference.rs +++ b/src/librustc/middle/typeck/infer/region_inference.rs @@ -620,8 +620,8 @@ impl RegionVarBindings { AddConstraint(constraint) => { self.constraints.remove(constraint); } - AddCombination(map, regions) => { - map.remove(regions); + AddCombination(map, ref regions) => { + map.remove((*regions)); } } } diff --git a/src/librustc/middle/typeck/infer/sub.rs b/src/librustc/middle/typeck/infer/sub.rs index d6a97e3d7f5..b7769ac876d 100644 --- a/src/librustc/middle/typeck/infer/sub.rs +++ b/src/librustc/middle/typeck/infer/sub.rs @@ -48,7 +48,7 @@ impl Sub: combine { do indent { match self.infcx.region_vars.make_subregion(self.span, a, b) { Ok(()) => Ok(a), - Err(e) => Err(e) + Err(ref e) => Err((*e)) } } } diff --git a/src/librustc/middle/typeck/infer/to_str.rs b/src/librustc/middle/typeck/infer/to_str.rs index 75024ca1a66..a882f20c04b 100644 --- a/src/librustc/middle/typeck/infer/to_str.rs +++ b/src/librustc/middle/typeck/infer/to_str.rs @@ -43,7 +43,7 @@ impl ty::FnTy: ToStr { impl<V:Copy ToStr> bound<V>: ToStr { fn to_str(cx: infer_ctxt) -> ~str { match self { - Some(v) => v.to_str(cx), + Some(ref v) => (*v).to_str(cx), None => ~"none" } } @@ -76,8 +76,8 @@ impl float_ty_set: ToStr { impl<V:Copy vid, T:Copy ToStr> var_value<V, T>: ToStr { fn to_str(cx: infer_ctxt) -> ~str { match self { - redirect(vid) => fmt!("redirect(%s)", vid.to_str()), - root(pt, rk) => fmt!("root(%s, %s)", pt.to_str(cx), + redirect(ref vid) => fmt!("redirect(%s)", (*vid).to_str()), + root(ref pt, rk) => fmt!("root(%s, %s)", (*pt).to_str(cx), uint::to_str(rk, 10u)) } } diff --git a/src/librustc/middle/typeck/infer/unify.rs b/src/librustc/middle/typeck/infer/unify.rs index 7a679c6c88f..5fa4acecdc8 100644 --- a/src/librustc/middle/typeck/infer/unify.rs +++ b/src/librustc/middle/typeck/infer/unify.rs @@ -39,18 +39,18 @@ impl infer_ctxt { None => { self.tcx.sess.bug(fmt!("failed lookup of vid `%u`", vid_u)); } - Some(var_val) => { - match var_val { - redirect(vid) => { - let node = self.get(vb, vid); - if node.root != vid { + Some(ref var_val) => { + match (*var_val) { + redirect(ref vid) => { + let node = self.get(vb, (*vid)); + if node.root != (*vid) { // Path compression - vb.vals.insert(vid.to_uint(), redirect(node.root)); + vb.vals.insert((*vid).to_uint(), redirect(node.root)); } node } - root(pt, rk) => { - node {root: vid, possible_types: pt, rank: rk} + root(ref pt, rk) => { + node {root: vid, possible_types: (*pt), rank: rk} } } } diff --git a/src/librustc/middle/typeck/mod.rs b/src/librustc/middle/typeck/mod.rs index 9ca7c4f3a13..f9f3ac2d45c 100644 --- a/src/librustc/middle/typeck/mod.rs +++ b/src/librustc/middle/typeck/mod.rs @@ -332,7 +332,7 @@ fn check_main_fn_ty(ccx: @crate_ctxt, let tcx = ccx.tcx; let main_t = ty::node_id_to_type(tcx, main_id); match ty::get(main_t).sty { - ty::ty_fn(fn_ty) => { + ty::ty_fn(ref fn_ty) => { 
match tcx.items.find(main_id) { Some(ast_map::node_item(it,_)) => { match it.node { @@ -348,8 +348,8 @@ fn check_main_fn_ty(ccx: @crate_ctxt, } _ => () } - let mut ok = ty::type_is_nil(fn_ty.sig.output); - let num_args = vec::len(fn_ty.sig.inputs); + let mut ok = ty::type_is_nil((*fn_ty).sig.output); + let num_args = vec::len((*fn_ty).sig.inputs); ok &= num_args == 0u; if !ok { tcx.sess.span_err( diff --git a/src/librustc/rustc.rc b/src/librustc/rustc.rc index ff7e974ed9b..01e50f7cf83 100644 --- a/src/librustc/rustc.rc +++ b/src/librustc/rustc.rc @@ -27,7 +27,7 @@ #[allow(vecs_implicitly_copyable)]; #[allow(non_camel_case_types)]; #[allow(deprecated_mode)]; -#[allow(deprecated_pattern)]; +#[warn(deprecated_pattern)]; extern mod core(vers = "0.5"); extern mod std(vers = "0.5"); @@ -334,9 +334,9 @@ fn run_compiler(args: &~[~str], demitter: diagnostic::emitter) { let matches = &match getopts::groups::getopts(args, optgroups()) { - Ok(m) => m, - Err(f) => { - early_error(demitter, getopts::fail_str(f)) + Ok(ref m) => (*m), + Err(ref f) => { + early_error(demitter, getopts::fail_str((*f))) } }; @@ -396,8 +396,8 @@ fn run_compiler(args: &~[~str], demitter: diagnostic::emitter) { let ls = opt_present(matches, ~"ls"); if ls { match input { - file_input(ifile) => { - list_metadata(sess, &ifile, io::stdout()); + file_input(ref ifile) => { + list_metadata(sess, &(*ifile), io::stdout()); } str_input(_) => { early_error(demitter, ~"can not list metadata for stdin"); diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 8d465dfe71f..6e08c22971c 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -39,14 +39,14 @@ fn note_and_explain_region(cx: ctxt, region: ty::Region, suffix: ~str) { match explain_region_and_span(cx, region) { - (str, Some(span)) => { + (ref str, Some(span)) => { cx.sess.span_note( span, - fmt!("%s%s%s", prefix, str, suffix)); + fmt!("%s%s%s", prefix, (*str), suffix)); } - (str, None) => { + (ref str, None) => { cx.sess.note( - fmt!("%s%s%s", prefix, str, suffix)); + fmt!("%s%s%s", prefix, (*str), suffix)); } } } @@ -65,8 +65,8 @@ fn explain_region_and_span(cx: ctxt, region: ty::Region) return match region { re_scope(node_id) => { match cx.items.find(node_id) { - Some(ast_map::node_block(blk)) => { - explain_span(cx, ~"block", blk.span) + Some(ast_map::node_block(ref blk)) => { + explain_span(cx, ~"block", (*blk).span) } Some(ast_map::node_expr(expr)) => { match expr.node { @@ -95,8 +95,8 @@ fn explain_region_and_span(cx: ctxt, region: ty::Region) }; match cx.items.find(id) { - Some(ast_map::node_block(blk)) => { - let (msg, opt_span) = explain_span(cx, ~"block", blk.span); + Some(ast_map::node_block(ref blk)) => { + let (msg, opt_span) = explain_span(cx, ~"block", (*blk).span); (fmt!("%s %s", prefix, msg), opt_span) } Some(_) | None => { @@ -143,9 +143,9 @@ fn bound_region_to_str_adorned(cx: ctxt, prefix: ~str, fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str { match cx.items.find(node_id) { - Some(ast_map::node_block(blk)) => { + Some(ast_map::node_block(ref blk)) => { fmt!("<block at %s>", - cx.sess.codemap.span_to_str(blk.span)) + cx.sess.codemap.span_to_str((*blk).span)) } Some(ast_map::node_expr(expr)) => { match expr.node { @@ -408,15 +408,15 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { ~"'" + str::from_bytes(~[('a' as u8) + (id as u8)]) } ty_self => ~"self", - ty_enum(did, substs) | ty_class(did, substs) => { + ty_enum(did, ref substs) | ty_class(did, ref substs) => { let path = ty::item_path(cx, did); let base = 
ast_map::path_to_str(path, cx.sess.intr()); - parameterized(cx, base, substs.self_r, substs.tps) + parameterized(cx, base, (*substs).self_r, (*substs).tps) } - ty_trait(did, substs, vs) => { + ty_trait(did, ref substs, vs) => { let path = ty::item_path(cx, did); let base = ast_map::path_to_str(path, cx.sess.intr()); - let result = parameterized(cx, base, substs.self_r, substs.tps); + let result = parameterized(cx, base, (*substs).self_r, (*substs).tps); vstore_ty_to_str(cx, result, vs) } ty_evec(mt, vs) => { diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 46fd7be656e..1e7e427a384 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -555,9 +555,9 @@ impl<T: to_bytes::IterBytes> inferable<T> : to_bytes::IterBytes { impl<T:cmp::Eq> inferable<T> : cmp::Eq { pure fn eq(&self, other: &inferable<T>) -> bool { match (*self) { - expl(e0a) => { + expl(ref e0a) => { match (*other) { - expl(e0b) => e0a == e0b, + expl(ref e0b) => (*e0a) == (*e0b), _ => false } } diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index d13ec70a975..d04447e8d73 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -163,8 +163,8 @@ fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, cx.local_id += 1u; } match fk { - visit::fk_dtor(tps, attrs, self_id, parent_id) => { - let dt = @{node: {id: id, attrs: attrs, self_id: self_id, + visit::fk_dtor(tps, ref attrs, self_id, parent_id) => { + let dt = @{node: {id: id, attrs: (*attrs), self_id: self_id, body: /* FIXME (#2543) */ copy body}, span: sp}; cx.map.insert(id, node_dtor(/* FIXME (#2543) */ copy tps, dt, parent_id, @@ -219,8 +219,8 @@ fn map_item(i: @item, cx: ctx, v: vt) { map_method(impl_did, extend(cx, i.ident), *m, cx); } } - item_enum(enum_definition, _) => { - for enum_definition.variants.each |v| { + item_enum(ref enum_definition, _) => { + for (*enum_definition).variants.each |v| { cx.map.insert(v.node.id, node_variant( /* FIXME (#2543) */ copy *v, i, extend(cx, i.ident))); @@ -228,7 +228,7 @@ fn map_item(i: @item, cx: ctx, v: vt) { } item_foreign_mod(nm) => { let abi = match attr::foreign_abi(i.attrs) { - either::Left(msg) => cx.diag.span_fatal(i.span, msg), + either::Left(ref msg) => cx.diag.span_fatal(i.span, (*msg)), either::Right(abi) => abi }; for nm.items.each |nitem| { @@ -249,7 +249,7 @@ fn map_item(i: @item, cx: ctx, v: vt) { map_struct_def(struct_def, node_item(i, item_path), i.ident, i.id, cx, v); } - item_trait(_, traits, methods) => { + item_trait(_, traits, ref methods) => { // Map trait refs to their parent classes. 
This is // so we can find the self_ty for traits.each |p| { @@ -258,7 +258,7 @@ fn map_item(i: @item, cx: ctx, v: vt) { // encoding/decoding cx.map.insert(p.impl_id, node_item(i, item_path)); } - for methods.each |tm| { + for (*methods).each |tm| { let id = ast_util::trait_method_to_ty_method(*tm).id; let d_id = ast_util::local_def(i.id); cx.map.insert(id, node_trait_method(@*tm, d_id, item_path)); @@ -368,9 +368,9 @@ fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { fmt!("method %s in %s (id=%?)", *itr.get(m.ident), path_to_str(*path, itr), id) } - Some(node_variant(variant, _, path)) => { + Some(node_variant(ref variant, _, path)) => { fmt!("variant %s in %s (id=%?)", - *itr.get(variant.node.name), path_to_str(*path, itr), id) + *itr.get((*variant).node.name), path_to_str(*path, itr), id) } Some(node_expr(expr)) => { fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id) diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index c5a45248e8e..7365ad12ba9 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -205,8 +205,8 @@ fn is_exported(i: ident, m: _mod) -> bool { for m.items.each |it| { if it.ident == i { local = true; } match it.node { - item_enum(enum_definition, _) => - for enum_definition.variants.each |v| { + item_enum(ref enum_definition, _) => + for (*enum_definition).variants.each |v| { if v.node.name == i { local = true; parent_enum = Some(/* FIXME (#2543) */ copy it.ident); @@ -233,10 +233,10 @@ fn is_exported(i: ident, m: _mod) -> bool { } } - ast::view_path_list(path, ids, _) => { + ast::view_path_list(path, ref ids, _) => { if vec::len(path.idents) == 1u { if i == path.idents[0] { return true; } - for ids.each |id| { + for (*ids).each |id| { if id.node.name == i { return true; } } } else { @@ -314,7 +314,7 @@ fn public_methods(ms: ~[@method]) -> ~[@method] { // a default, pull out the useful fields to make a ty_method fn trait_method_to_ty_method(method: trait_method) -> ty_method { match method { - required(m) => m, + required(ref m) => (*m), provided(m) => { {ident: m.ident, attrs: m.attrs, purity: m.purity, decl: m.decl, @@ -329,7 +329,7 @@ fn split_trait_methods(trait_methods: ~[trait_method]) let mut reqd = ~[], provd = ~[]; for trait_methods.each |trt_method| { match *trt_method { - required(tm) => reqd.push(tm), + required(ref tm) => reqd.push((*tm)), provided(m) => provd.push(m) } }; @@ -364,7 +364,7 @@ impl inlined_item: inlined_item_utils { ii_item(i) => i.id, ii_foreign(i) => i.id, ii_method(_, m) => m.id, - ii_dtor(dtor, _, _, _) => dtor.node.id + ii_dtor(ref dtor, _, _, _) => (*dtor).node.id } } @@ -373,8 +373,8 @@ impl inlined_item: inlined_item_utils { ii_item(i) => (v.visit_item)(i, e, v), ii_foreign(i) => (v.visit_foreign_item)(i, e, v), ii_method(_, m) => visit::visit_method_helper(m, e, v), - ii_dtor(dtor, _, tps, parent_id) => { - visit::visit_class_dtor_helper(dtor, tps, parent_id, e, v); + ii_dtor(ref dtor, _, tps, parent_id) => { + visit::visit_class_dtor_helper((*dtor), tps, parent_id, e, v); } } } @@ -453,8 +453,8 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { visit_item: fn@(i: @item) { vfn(i.id); match i.node { - item_enum(enum_definition, _) => - for enum_definition.variants.each |v| { vfn(v.node.id); }, + item_enum(ref enum_definition, _) => + for (*enum_definition).variants.each |v| { vfn(v.node.id); }, _ => () } }, @@ -643,7 +643,7 @@ impl Privacy : cmp::Eq { fn has_legacy_export_attr(attrs: &[attribute]) -> bool { for attrs.each |attribute| { match 
attribute.node.value.node { - meta_word(w) if w == ~"legacy_exports" => { + meta_word(ref w) if (*w) == ~"legacy_exports" => { return true; } _ => {} diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 1c7171ce787..79f78079784 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -124,9 +124,9 @@ fn get_attr_name(attr: ast::attribute) -> ~str { fn get_meta_item_name(meta: @ast::meta_item) -> ~str { match meta.node { - ast::meta_word(n) => n, - ast::meta_name_value(n, _) => n, - ast::meta_list(n, _) => n + ast::meta_word(ref n) => (*n), + ast::meta_name_value(ref n, _) => (*n), + ast::meta_list(ref n, _) => (*n) } } @@ -158,9 +158,9 @@ fn get_meta_item_list(meta: @ast::meta_item) -> Option<~[@ast::meta_item]> { */ fn get_name_value_str_pair(item: @ast::meta_item) -> Option<(~str, ~str)> { match attr::get_meta_item_value_str(item) { - Some(value) => { + Some(ref value) => { let name = attr::get_meta_item_name(item); - Some((name, value)) + Some((name, (*value))) } None => None } @@ -206,12 +206,12 @@ fn contains(haystack: ~[@ast::meta_item], needle: @ast::meta_item) -> bool { fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool { return match a.node { - ast::meta_word(na) => match b.node { - ast::meta_word(nb) => na == nb, + ast::meta_word(ref na) => match b.node { + ast::meta_word(ref nb) => (*na) == (*nb), _ => false }, - ast::meta_name_value(na, va) => match b.node { - ast::meta_name_value(nb, vb) => na == nb && va.node == vb.node, + ast::meta_name_value(ref na, va) => match b.node { + ast::meta_name_value(ref nb, vb) => (*na) == (*nb) && va.node == vb.node, _ => false }, ast::meta_list(*) => { @@ -256,7 +256,7 @@ fn last_meta_item_value_str_by_name(items: ~[@ast::meta_item], name: ~str) match last_meta_item_by_name(items, name) { Some(item) => match attr::get_meta_item_value_str(item) { - Some(value) => Some(value), + Some(ref value) => Some((*value)), None => None }, None => None @@ -281,9 +281,9 @@ fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] { pure fn lteq(ma: &@ast::meta_item, mb: &@ast::meta_item) -> bool { pure fn key(m: &ast::meta_item) -> ~str { match m.node { - ast::meta_word(name) => name, - ast::meta_name_value(name, _) => name, - ast::meta_list(name, _) => name + ast::meta_word(ref name) => (*name), + ast::meta_name_value(ref name, _) => (*name), + ast::meta_list(ref name, _) => (*name) } } key(*ma) <= key(*mb) @@ -334,8 +334,8 @@ fn foreign_abi(attrs: ~[ast::attribute]) -> Either<~str, ast::foreign_abi> { option::Some(~"stdcall") => { either::Right(ast::foreign_abi_stdcall) } - option::Some(t) => { - either::Left(~"unsupported abi: " + t) + option::Some(ref t) => { + either::Left(~"unsupported abi: " + (*t)) } }; } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index ffc786274b7..5218a753ae0 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -308,10 +308,10 @@ pub impl CodeMap { self.lookup_char_pos_adj( sp.lo + (pos - loc.file.start_pos)) } - FssExternal(eloc) => { - {filename: /* FIXME (#2543) */ copy eloc.filename, - line: eloc.line + loc.line - 1u, - col: if loc.line == 1u {eloc.col + loc.col} else {loc.col}, + FssExternal(ref eloc) => { + {filename: /* FIXME (#2543) */ copy (*eloc).filename, + line: (*eloc).line + loc.line - 1u, + col: if loc.line == 1u {(*eloc).col + loc.col} else {loc.col}, file: None} } } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 6aaad755b8b..e42bb00c212 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs 
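Every hunk in this part of the patch applies the same mechanical rewrite: a legacy by-value binding such as `Some(v) => v` or `Err(e) => Err(e)` becomes an explicit by-reference binding, `Some(ref v) => (*v)`, so nothing is implicitly copied or moved out of the matched value. A minimal sketch of that distinction in present-day Rust follows; the `Config` type and `describe` function are invented for the illustration and are not part of this commit.

    // Invented example: by-value vs. `ref` bindings on a borrowed Option.
    struct Config {
        name: String,
    }

    fn describe(opt: &Option<Config>) -> String {
        match *opt {
            // A plain `Some(c)` would try to move the non-Copy Config out of
            // the borrowed Option and fail to compile. `ref c` borrows it in
            // place, and `(*c).name` reads through the reference, mirroring
            // the `(*v)` spelling used throughout this patch.
            Some(ref c) => format!("name = {}", (*c).name),
            None => "<none>".to_string(),
        }
    }

    fn main() {
        let cfg = Some(Config { name: "release".to_string() });
        println!("{}", describe(&cfg));
    }
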
@@ -285,7 +285,7 @@ fn print_macro_backtrace(cm: @codemap::CodeMap, sp: span) { fn expect<T: Copy>(diag: span_handler, opt: Option<T>, msg: fn() -> ~str) -> T { match opt { - Some(t) => t, + Some(ref t) => (*t), None => diag.handler().bug(msg()) } } diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs index 1242d528194..a42a51fd302 100644 --- a/src/libsyntax/ext/auto_serialize.rs +++ b/src/libsyntax/ext/auto_serialize.rs @@ -120,12 +120,12 @@ fn expand_auto_serialize( do vec::flat_map(in_items) |item| { if item.attrs.any(is_auto_serialize) { match item.node { - ast::item_ty(@{node: ast::ty_rec(fields), _}, tps) => { + ast::item_ty(@{node: ast::ty_rec(ref fields), _}, tps) => { let ser_impl = mk_rec_ser_impl( cx, item.span, item.ident, - fields, + (*fields), tps ); @@ -142,12 +142,12 @@ fn expand_auto_serialize( ~[filter_attrs(*item), ser_impl] }, - ast::item_enum(enum_def, tps) => { + ast::item_enum(ref enum_def, tps) => { let ser_impl = mk_enum_ser_impl( cx, item.span, item.ident, - enum_def, + (*enum_def), tps ); @@ -184,12 +184,12 @@ fn expand_auto_deserialize( do vec::flat_map(in_items) |item| { if item.attrs.any(is_auto_deserialize) { match item.node { - ast::item_ty(@{node: ast::ty_rec(fields), _}, tps) => { + ast::item_ty(@{node: ast::ty_rec(ref fields), _}, tps) => { let deser_impl = mk_rec_deser_impl( cx, item.span, item.ident, - fields, + (*fields), tps ); @@ -206,12 +206,12 @@ fn expand_auto_deserialize( ~[filter_attrs(*item), deser_impl] }, - ast::item_enum(enum_def, tps) => { + ast::item_enum(ref enum_def, tps) => { let deser_impl = mk_enum_deser_impl( cx, item.span, item.ident, - enum_def, + (*enum_def), tps ); diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 79dbbbe0b72..630ba3b8749 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -202,12 +202,12 @@ fn mk_ctxt(parse_sess: parse::parse_sess, fn mod_path() -> ~[ast::ident] { return self.mod_path; } fn bt_push(ei: codemap::ExpnInfo) { match ei { - ExpandedFrom({call_site: cs, callie: callie}) => { + ExpandedFrom({call_site: cs, callie: ref callie}) => { self.backtrace = Some(@ExpandedFrom({ call_site: span {lo: cs.lo, hi: cs.hi, expn_info: self.backtrace}, - callie: callie})); + callie: (*callie)})); } } } diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index 1371cd30308..51db63c819a 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -28,7 +28,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, let var = expr_to_str(cx, args[0], ~"env! requires a string"); match os::getenv(var) { option::None => return mk_uniq_str(cx, sp, ~""), - option::Some(s) => return mk_uniq_str(cx, sp, s) + option::Some(ref s) => return mk_uniq_str(cx, sp, (*s)) } } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index a9fdcc18661..6efca050fa5 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -29,9 +29,9 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, return match e { // expr_mac should really be expr_ext or something; it's the // entry-point for all syntax extensions. - expr_mac(mac) => { + expr_mac(ref mac) => { - match mac.node { + match (*mac).node { // Old-style macros. For compatibility, will erase this whole // block once we've transitioned. 
mac_invoc(pth, args, body) => { @@ -50,7 +50,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, fmt!("%s can only be used as a decorator", *extname)); } Some(normal({expander: exp, span: exp_sp})) => { - let expanded = exp(cx, mac.span, args, body); + let expanded = exp(cx, (*mac).span, args, body); cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); @@ -61,7 +61,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, (fully_expanded, s) } Some(macro_defining(ext)) => { - let named_extension = ext(cx, mac.span, args, body); + let named_extension = ext(cx, (*mac).span, args, body); exts.insert(named_extension.name, named_extension.ext); (ast::expr_rec(~[], None), s) } @@ -79,7 +79,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, // Token-tree macros, these will be the only case when we're // finished transitioning. - mac_invoc_tt(pth, tts) => { + mac_invoc_tt(pth, ref tts) => { assert (vec::len(pth.idents) == 1u); /* using idents and token::special_idents would make the the macro names be hygienic */ @@ -90,7 +90,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, fmt!("macro undefined: '%s'", *extname)) } Some(normal_tt({expander: exp, span: exp_sp})) => { - let expanded = match exp(cx, mac.span, tts) { + let expanded = match exp(cx, (*mac).span, (*tts)) { mr_expr(e) => e, mr_any(expr_maker,_,_) => expr_maker(), _ => cx.span_fatal( @@ -109,8 +109,8 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, Some(normal({expander: exp, span: exp_sp})) => { //convert the new-style invoc for the old-style macro let arg = base::tt_args_to_original_flavor(cx, pth.span, - tts); - let expanded = exp(cx, mac.span, arg, None); + (*tts)); + let expanded = exp(cx, (*mac).span, arg, None); cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); @@ -128,7 +128,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, } } - _ => cx.span_bug(mac.span, ~"naked syntactic bit") + _ => cx.span_bug((*mac).span, ~"naked syntactic bit") } } _ => orig(e, s, fld) @@ -158,9 +158,9 @@ fn expand_mod_items(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, let new_items = do vec::flat_map(module_.items) |item| { do vec::foldr(item.attrs, ~[*item]) |attr, items| { let mname = match attr.node.value.node { - ast::meta_word(n) => n, - ast::meta_name_value(n, _) => n, - ast::meta_list(n, _) => n + ast::meta_word(ref n) => (*n), + ast::meta_name_value(ref n, _) => (*n), + ast::meta_list(ref n, _) => (*n) }; match exts.find(mname) { None | Some(normal(_)) | Some(macro_defining(_)) @@ -227,10 +227,10 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, &&it: @ast::item, fld: ast_fold) -> Option<@ast::item> { let (pth, tts) = biased_match!( - (it.node) ~ (item_mac({node: mac_invoc_tt(pth, tts), _})) else { + (it.node) ~ (item_mac({node: mac_invoc_tt(pth, ref tts), _})) else { cx.span_bug(it.span, ~"invalid item macro invocation") }; - => (pth, tts) + => (pth, (*tts)) ); let extname = cx.parse_sess().interner.get(pth.idents[0]); @@ -238,22 +238,22 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, None => cx.span_fatal(pth.span, fmt!("macro undefined: '%s!'", *extname)), - Some(normal_tt(expand)) => { + Some(normal_tt(ref expand)) => { if it.ident != parse::token::special_idents::invalid { cx.span_fatal(pth.span, fmt!("macro %s! 
expects no ident argument, \ given '%s'", *extname, *cx.parse_sess().interner.get(it.ident))); } - ((expand.expander)(cx, it.span, tts), expand.span) + (((*expand).expander)(cx, it.span, tts), (*expand).span) } - Some(item_tt(expand)) => { + Some(item_tt(ref expand)) => { if it.ident == parse::token::special_idents::invalid { cx.span_fatal(pth.span, fmt!("macro %s! expects an ident argument", *extname)); } - ((expand.expander)(cx, it.span, it.ident, tts), expand.span) + (((*expand).expander)(cx, it.span, it.ident, tts), (*expand).span) } _ => cx.span_fatal( it.span, fmt!("%s! is not legal in item position", *extname)) @@ -268,8 +268,8 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, + *extname), mr_any(_, item_maker, _) => option::chain(item_maker(), |i| {fld.fold_item(i)}), - mr_def(mdef) => { - exts.insert(mdef.name, mdef.ext); + mr_def(ref mdef) => { + exts.insert((*mdef).name, (*mdef).ext); None } }; @@ -283,11 +283,11 @@ fn expand_stmt(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, -> (stmt_, span) { let (mac, pth, tts, semi) = biased_match! ( - (s) ~ (stmt_mac(mac, semi)) else return orig(s, sp, fld); - (mac.node) ~ (mac_invoc_tt(pth, tts)) else { - cx.span_bug(mac.span, ~"naked syntactic bit") + (s) ~ (stmt_mac(ref mac, semi)) else return orig(s, sp, fld); + ((*mac).node) ~ (mac_invoc_tt(pth, ref tts)) else { + cx.span_bug((*mac).span, ~"naked syntactic bit") }; - => (mac, pth, tts, semi)); + => ((*mac), pth, (*tts), semi)); assert(vec::len(pth.idents) == 1u); let extname = cx.parse_sess().interner.get(pth.idents[0]); diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index d6ea6791284..e0d3bd03f42 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -255,8 +255,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, let nargs = args.len(); for pieces.each |pc| { match *pc { - PieceString(s) => { - piece_exprs.push(mk_uniq_str(cx, fmt_sp, s)) + PieceString(ref s) => { + piece_exprs.push(mk_uniq_str(cx, fmt_sp, (*s))) } PieceConv(conv) => { n += 1u; diff --git a/src/libsyntax/ext/pipes/check.rs b/src/libsyntax/ext/pipes/check.rs index cfe4a3d19ac..cd76655fef6 100644 --- a/src/libsyntax/ext/pipes/check.rs +++ b/src/libsyntax/ext/pipes/check.rs @@ -50,18 +50,18 @@ impl ext_ctxt: proto::visitor<(), (), ()> { fn visit_message(name: ~str, _span: span, _tys: &[@ast::Ty], this: state, next: next_state) { match next { - Some({state: next, tys: next_tys}) => { + Some({state: ref next, tys: next_tys}) => { let proto = this.proto; - if !proto.has_state(next) { + if !proto.has_state((*next)) { // This should be a span fatal, but then we need to // track span information. 
self.span_err( - proto.get_state(next).span, + proto.get_state((*next)).span, fmt!("message %s steps to undefined state, %s", - name, next)); + name, (*next))); } else { - let next = proto.get_state(next); + let next = proto.get_state((*next)); if next.ty_params.len() != next_tys.len() { self.span_err( diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 87db2b1cf63..1c4dd197105 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -55,10 +55,10 @@ impl message: gen_send { fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item { debug!("pipec: gen_send"); match self { - message(_id, span, tys, this, - Some({state: next, tys: next_tys})) => { + message(ref _id, span, tys, this, + Some({state: ref next, tys: next_tys})) => { debug!("pipec: next state exists"); - let next = this.proto.get_state(next); + let next = this.proto.get_state((*next)); assert next_tys.len() == next.ty_params.len(); let arg_names = tys.mapi(|i, _ty| cx.ident_of(~"x_"+i.to_str())); @@ -139,7 +139,7 @@ impl message: gen_send { cx.expr_block(body)) } - message(_id, span, tys, this, None) => { + message(ref _id, span, tys, this, None) => { debug!("pipec: no next state"); let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str())); @@ -220,8 +220,8 @@ impl state: to_type_decls { let message(name, span, tys, this, next) = *m; let tys = match next { - Some({state: next, tys: next_tys}) => { - let next = this.proto.get_state(next); + Some({state: ref next, tys: next_tys}) => { + let next = this.proto.get_state((*next)); let next_name = cx.str_of(next.data_name()); let dir = match this.dir { diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index f8c4648dd4a..af75c9e71dc 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -55,7 +55,7 @@ enum message { impl message { fn name() -> ~str { match self { - message(id, _, _, _, _) => id + message(ref id, _, _, _, _) => (*id) } } @@ -113,8 +113,8 @@ impl state { fn reachable(f: fn(state) -> bool) { for self.messages.each |m| { match *m { - message(_, _, _, _, Some({state: id, _})) => { - let state = self.proto.get_state(id); + message(_, _, _, _, Some({state: ref id, _})) => { + let state = self.proto.get_state((*id)); if !f(state) { break } } _ => () diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index 346798c9029..e13dfe750b7 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -69,7 +69,7 @@ impl @ast::expr: qq_helper { fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_expr(self, cx, v);} fn extract_mac() -> Option<ast::mac_> { match (self.node) { - ast::expr_mac({node: mac, _}) => Some(mac), + ast::expr_mac({node: ref mac, _}) => Some((*mac)), _ => None } } @@ -84,7 +84,7 @@ impl @ast::Ty: qq_helper { fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);} fn extract_mac() -> Option<ast::mac_> { match (self.node) { - ast::ty_mac({node: mac, _}) => Some(mac), + ast::ty_mac({node: ref mac, _}) => Some((*mac)), _ => None } } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 27760e4117f..b2e651c7e33 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -104,15 +104,15 @@ fn mk_span(cx: ext_ctxt, qsp: span, sp: span) -> @ast::expr { let e_expn_info = match sp.expn_info { None => build::mk_path(cx, qsp, ids_ext(cx, ~[~"None"])), - Some(@codemap::ExpandedFrom(cr)) => { + Some(@codemap::ExpandedFrom(ref cr)) => { let e_callee = build::mk_rec_e( cx, qsp, ~[{ident: 
id_ext(cx, ~"name"), ex: build::mk_uniq_str(cx, qsp, - cr.callie.name)}, + (*cr).callie.name)}, {ident: id_ext(cx, ~"span"), - ex: mk_option_span(cx, qsp, cr.callie.span)}]); + ex: mk_option_span(cx, qsp, (*cr).callie.span)}]); let e_expn_info_ = build::mk_call( @@ -121,7 +121,7 @@ fn mk_span(cx: ext_ctxt, qsp: span, sp: span) -> @ast::expr { ~[build::mk_rec_e( cx, qsp, ~[{ident: id_ext(cx, ~"call_site"), - ex: mk_span(cx, qsp, cr.call_site)}, + ex: mk_span(cx, qsp, (*cr).call_site)}, {ident: id_ext(cx, ~"callie"), ex: e_callee}])]); @@ -327,20 +327,20 @@ fn mk_token(cx: ext_ctxt, sp: span, tok: token::Token) -> @ast::expr { fn mk_tt(cx: ext_ctxt, sp: span, tt: &ast::token_tree) -> @ast::expr { match *tt { - ast::tt_tok(sp, tok) => { + ast::tt_tok(sp, ref tok) => { let e_tok = build::mk_call(cx, sp, ids_ext(cx, ~[~"tt_tok"]), ~[mk_span(cx, sp, sp), - mk_token(cx, sp, tok)]); + mk_token(cx, sp, (*tok))]); build::mk_uniq_vec_e(cx, sp, ~[e_tok]) } - ast::tt_delim(tts) => { + ast::tt_delim(ref tts) => { let e_delim = build::mk_call(cx, sp, ids_ext(cx, ~[~"tt_delim"]), - ~[mk_tts(cx, sp, tts)]); + ~[mk_tts(cx, sp, (*tts))]); build::mk_uniq_vec_e(cx, sp, ~[e_delim]) } diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index 5e47dee548f..1bf24670aab 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -55,8 +55,8 @@ fn match_error(cx: ext_ctxt, m: matchable, expected: ~str) -> ! { x.span, ~"this argument is an ident, expected " + expected), match_ty(x) => cx.span_fatal( x.span, ~"this argument is a type, expected " + expected), - match_block(x) => cx.span_fatal( - x.span, ~"this argument is a block, expected " + expected), + match_block(ref x) => cx.span_fatal( + (*x).span, ~"this argument is a block, expected " + expected), match_exact => cx.bug(~"what is a match_exact doing in a bindings?") } } @@ -76,10 +76,10 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) -> let mut res = None; for elts.each |elt| { match elt.node { - expr_mac(m) => match m.node { + expr_mac(ref m) => match (*m).node { ast::mac_ellipsis => { if res.is_some() { - cx.span_fatal(m.span, ~"only one ellipsis allowed"); + cx.span_fatal((*m).span, ~"only one ellipsis allowed"); } res = Some({pre: vec::slice(elts, 0u, idx - 1u), @@ -104,7 +104,7 @@ fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) -> for v.each |elem| { match f(*elem) { None => return None, - Some(fv) => res.push(fv) + Some(ref fv) => res.push((*fv)) } } return Some(res); @@ -112,7 +112,7 @@ fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) -> fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result { match ad { - leaf(x) => return f(x), + leaf(ref x) => return f((*x)), seq(ads, span) => match option_flatten_map(|x| a_d_map(x, f), *ads) { None => return None, Some(ts) => return Some(seq(@ts, span)) @@ -124,7 +124,7 @@ fn compose_sels(s1: selector, s2: selector) -> selector { fn scomp(s1: selector, s2: selector, m: matchable) -> match_result { return match s1(m) { None => None, - Some(matches) => a_d_map(matches, s2) + Some(ref matches) => a_d_map((*matches), s2) } } return { |x| scomp(s1, s2, x) }; @@ -172,7 +172,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> Option<bindings> { for b.real_binders.each |key, val| { match val(match_expr(e)) { None => never_mind = true, - Some(mtc) => { res.insert(key, mtc); } + Some(ref mtc) => { res.insert(key, (*mtc)); } } }; //HACK: `ret` doesn't work in `for each` @@ -231,14 +231,14 @@ fn follow_for_trans(cx: 
ext_ctxt, mmaybe: Option<arb_depth<matchable>>, idx_path: @mut ~[uint]) -> Option<matchable> { match mmaybe { None => return None, - Some(m) => { - return match follow(m, *idx_path) { + Some(ref m) => { + return match follow((*m), *idx_path) { seq(_, sp) => { cx.span_fatal(sp, ~"syntax matched under ... but not " + ~"used that way.") } - leaf(m) => return Some(m) + leaf(ref m) => return Some((*m)) } } } @@ -337,7 +337,7 @@ fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], &&i: ident, _fld: ast_fold) -> ident { return match follow_for_trans(cx, b.find(i), idx_path) { Some(match_ident(a_id)) => a_id.node, - Some(m) => match_error(cx, m, ~"an identifier"), + Some(ref m) => match_error(cx, (*m), ~"an identifier"), None => i } } @@ -353,7 +353,7 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], rp: None, types: ~[]} } Some(match_path(a_pth)) => *a_pth, - Some(m) => match_error(cx, m, ~"a path"), + Some(ref m) => match_error(cx, (*m), ~"a path"), None => p } } @@ -380,7 +380,7 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], } Some(match_path(a_pth)) => (expr_path(a_pth), s), Some(match_expr(a_exp)) => (a_exp.node, a_exp.span), - Some(m) => match_error(cx, m, ~"an expression"), + Some(ref m) => match_error(cx, (*m), ~"an expression"), None => orig(e, s, fld) } } @@ -399,7 +399,7 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], Some(id) => { match follow_for_trans(cx, b.find(id), idx_path) { Some(match_ty(ty)) => (ty.node, ty.span), - Some(m) => match_error(cx, m, ~"a type"), + Some(ref m) => match_error(cx, (*m), ~"a type"), None => orig(t, s, fld) } } @@ -422,10 +422,10 @@ fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], return match block_to_ident(blk) { Some(id) => { match follow_for_trans(cx, b.find(id), idx_path) { - Some(match_block(new_blk)) => (new_blk.node, new_blk.span), + Some(match_block(ref new_blk)) => ((*new_blk).node, (*new_blk).span), // possibly allow promotion of ident/path/expr to blocks? 
- Some(m) => match_error(cx, m, ~"a block"), + Some(ref m) => match_error(cx, (*m), ~"a block"), None => orig(blk, s, fld) } } @@ -468,8 +468,8 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) { } } /* FIXME (#2251): handle embedded types and blocks, at least */ - expr_mac(mac) => { - p_t_s_r_mac(cx, mac, s, b); + expr_mac(ref mac) => { + p_t_s_r_mac(cx, (*mac), s, b); } _ => { fn select(cx: ext_ctxt, m: matchable, pat: @expr) -> @@ -548,7 +548,7 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, _s: selector, _b: binders) { fn_m: fn(ast::mac) -> match_result) -> match_result { return match m { match_expr(e) => match e.node { - expr_mac(mac) => fn_m(mac), + expr_mac(ref mac) => fn_m((*mac)), _ => None }, _ => cx.bug(~"broken traversal in p_t_s_r") @@ -659,15 +659,15 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, match elts[0u].node { - expr_mac(mac) => { - match mac.node { + expr_mac(ref mac) => { + match (*mac).node { mac_invoc(pth, invoc_arg, _) => { match path_to_ident(pth) { Some(id) => { let id_str = cx.str_of(id); match macro_name { None => macro_name = Some(id_str), - Some(other_id) => if id_str != other_id { + Some(ref other_id) => if id_str != (*other_id) { cx.span_fatal(pth.span, ~"macro name must be " + ~"consistent"); @@ -679,7 +679,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, } let arg = match invoc_arg { Some(arg) => arg, - None => cx.span_fatal(mac.span, + None => cx.span_fatal((*mac).span, ~"macro must have arguments") }; clauses.push(@{params: pattern_to_selectors(cx, arg), @@ -689,7 +689,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, // the macro arg situation) } _ => { - cx.span_bug(mac.span, ~"undocumented invariant in \ + cx.span_bug((*mac).span, ~"undocumented invariant in \ add_extension"); } } @@ -712,7 +712,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, return {name: match macro_name { - Some(id) => id, + Some(ref id) => (*id), None => cx.span_fatal(sp, ~"macro definition must have " + ~"at least one clause") }, diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 0e1eb2ee2df..aa97646c054 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -83,8 +83,8 @@ fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file))); match res { result::Ok(_) => { /* Continue. 
*/ } - result::Err(e) => { - cx.parse_sess().span_diagnostic.handler().fatal(e); + result::Err(ref e) => { + cx.parse_sess().span_diagnostic.handler().fatal((*e)); } } @@ -104,8 +104,8 @@ fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, }); return mk_base_vec_e(cx, sp, u8_exprs); } - result::Err(e) => { - cx.parse_sess().span_diagnostic.handler().fatal(e) + result::Err(ref e) => { + cx.parse_sess().span_diagnostic.handler().fatal((*e)) } } } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 44a3774ddd0..e51800b8a61 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -117,8 +117,8 @@ type matcher_pos = ~{ }; fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos { - match mpu { - matcher_pos_up(Some(mp)) => copy mp, + match &mpu { + &matcher_pos_up(Some(ref mp)) => copy (*mp), _ => fail } } @@ -127,7 +127,7 @@ fn count_names(ms: &[matcher]) -> uint { vec::foldl(0u, ms, |ct, m| { ct + match m.node { match_tok(_) => 0u, - match_seq(more_ms, _, _, _, _) => count_names(more_ms), + match_seq(ref more_ms, _, _, _, _) => count_names((*more_ms)), match_nonterminal(_,_,_) => 1u }}) } @@ -184,8 +184,8 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match]) ret_val: HashMap<ident, @named_match>) { match m { {node: match_tok(_), span: _} => (), - {node: match_seq(more_ms, _, _, _, _), span: _} => { - for more_ms.each() |next_m| { n_rec(p_s, *next_m, res, ret_val) }; + {node: match_seq(ref more_ms, _, _, _, _), span: _} => { + for (*more_ms).each() |next_m| { n_rec(p_s, *next_m, res, ret_val) }; } {node: match_nonterminal(bind_name, _, idx), span: sp} => { if ret_val.contains_key(bind_name) { @@ -211,8 +211,8 @@ fn parse_or_else(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) -> HashMap<ident, @named_match> { match parse(sess, cfg, rdr, ms) { success(m) => m, - failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str), - error(sp, str) => sess.span_diagnostic.span_fatal(sp, str) + failure(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)), + error(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)) } } @@ -274,8 +274,8 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) // the *_t vars are workarounds for the lack of unary move match copy ei.sep { - Some(t) if idx == len => { // we need a separator - if tok == t { //pass the separator + Some(ref t) if idx == len => { // we need a separator + if tok == (*t) { //pass the separator let ei_t = move ei; ei_t.idx += 1; next_eis.push(move ei_t); @@ -293,7 +293,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) } else { match copy ei.elts[idx].node { /* need to descend into sequence */ - match_seq(matchers, sep, zero_ok, + match_seq(ref matchers, ref sep, zero_ok, match_idx_lo, match_idx_hi) => { if zero_ok { let new_ei = copy ei; @@ -310,7 +310,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) |_m| DVec::<@named_match>()); let ei_t = move ei; cur_eis.push(~{ - elts: matchers, sep: sep, mut idx: 0u, + elts: (*matchers), sep: (*sep), mut idx: 0u, mut up: matcher_pos_up(Some(move ei_t)), matches: move matches, match_lo: match_idx_lo, match_hi: match_idx_hi, @@ -318,9 +318,9 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) }); } match_nonterminal(_,_,_) => { bb_eis.push(move ei) } - match_tok(t) => { + match_tok(ref t) => { let ei_t = move ei; - if t == tok { + if (*t) == tok { 
ei_t.idx += 1; next_eis.push(move ei_t); } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 0767a3cce83..09415703260 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -84,17 +84,17 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, for lhses.eachi() |i, lhs| { // try each arm's matchers match *lhs { - @matched_nonterminal(nt_matchers(mtcs)) => { + @matched_nonterminal(nt_matchers(ref mtcs)) => { // `none` is because we're not interpolating let arg_rdr = new_tt_reader(s_d, itr, None, arg) as reader; - match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs) { + match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) { success(named_matches) => { let rhs = match rhses[i] { // okay, what's your transcriber? - @matched_nonterminal(nt_tt(@tt)) => { - match tt { + @matched_nonterminal(nt_tt(@ref tt)) => { + match (*tt) { // cut off delimiters; don't parse 'em - tt_delim(tts) => tts.slice(1u,tts.len()-1u), + tt_delim(ref tts) => (*tts).slice(1u,(*tts).len()-1u), _ => cx.span_fatal( sp, ~"macro rhs must be delimited") } @@ -113,11 +113,11 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, || p.parse_item(~[/* no attrs*/]), || p.parse_stmt(~[/* no attrs*/])); } - failure(sp, msg) => if sp.lo >= best_fail_spot.lo { + failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo { best_fail_spot = sp; - best_fail_msg = msg; + best_fail_msg = (*msg); }, - error(sp, msg) => cx.span_fatal(sp, msg) + error(sp, ref msg) => cx.span_fatal(sp, (*msg)) } } _ => cx.bug(~"non-matcher found in parsed lhses") diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index eeb8b068b5b..3d901039188 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -130,8 +130,8 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis { } } match t { - tt_delim(tts) | tt_seq(_, tts, _, _) => { - vec::foldl(lis_unconstrained, tts, |lis, tt| + tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => { + vec::foldl(lis_unconstrained, (*tts), |lis, tt| lis_merge(lis, lockstep_iter_size(*tt, r), r)) } tt_tok(*) => lis_unconstrained, @@ -170,8 +170,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { r.cur.idx = 0u; r.repeat_idx[r.repeat_idx.len() - 1u] += 1u; match r.cur.sep { - Some(tk) => { - r.cur_tok = tk; /* repeat same span, I guess */ + Some(ref tk) => { + r.cur_tok = (*tk); /* repeat same span, I guess */ return ret_val; } None => () @@ -181,27 +181,27 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { loop { /* because it's easiest, this handles `tt_delim` not starting with a `tt_tok`, even though it won't happen */ match r.cur.readme[r.cur.idx] { - tt_delim(tts) => { - r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: false, + tt_delim(ref tts) => { + r.cur = @{readme: (*tts), mut idx: 0u, dotdotdoted: false, sep: None, up: tt_frame_up(option::Some(r.cur)) }; // if this could be 0-length, we'd need to potentially recur here } - tt_tok(sp, tok) => { - r.cur_span = sp; r.cur_tok = tok; + tt_tok(sp, ref tok) => { + r.cur_span = sp; r.cur_tok = (*tok); r.cur.idx += 1u; return ret_val; } - tt_seq(sp, tts, sep, zerok) => { - match lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) { + tt_seq(sp, ref tts, ref sep, zerok) => { + match lockstep_iter_size(tt_seq(sp, (*tts), (*sep), zerok), r) { lis_unconstrained => { r.sp_diag.span_fatal( sp, /* blame macro writer */ ~"attempted to repeat an expression containing no syntax \ variables matched as 
repeating at this depth"); } - lis_contradiction(msg) => { /* FIXME #2887 blame macro invoker + lis_contradiction(ref msg) => { /* FIXME #2887 blame macro invoker instead*/ - r.sp_diag.span_fatal(sp, msg); + r.sp_diag.span_fatal(sp, (*msg)); } lis_constraint(len, _) => { if len == 0 { @@ -217,8 +217,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { } else { r.repeat_len.push(len); r.repeat_idx.push(0u); - r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: true, - sep: sep, up: tt_frame_up(option::Some(r.cur))}; + r.cur = @{readme: (*tts), mut idx: 0u, dotdotdoted: true, + sep: (*sep), up: tt_frame_up(option::Some(r.cur))}; } } } @@ -234,8 +234,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { r.cur.idx += 1u; return ret_val; } - matched_nonterminal(other_whole_nt) => { - r.cur_span = sp; r.cur_tok = INTERPOLATED(other_whole_nt); + matched_nonterminal(ref other_whole_nt) => { + r.cur_span = sp; r.cur_tok = INTERPOLATED((*other_whole_nt)); r.cur.idx += 1u; return ret_val; } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 15435f48421..39da8531da8 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -89,14 +89,14 @@ type ast_fold_precursor = @{ fn fold_meta_item_(&&mi: @meta_item, fld: ast_fold) -> @meta_item { return @{node: match mi.node { - meta_word(id) => meta_word(id), - meta_list(id, mis) => { + meta_word(ref id) => meta_word((*id)), + meta_list(ref id, mis) => { let fold_meta_item = |x|fold_meta_item_(x, fld); - meta_list(/* FIXME: (#2543) */ copy id, + meta_list(/* FIXME: (#2543) */ copy (*id), vec::map(mis, |e| fold_meta_item(*e))) } - meta_name_value(id, s) => { - meta_name_value(id, /* FIXME (#2543) */ copy s) + meta_name_value(ref id, s) => { + meta_name_value((*id), /* FIXME (#2543) */ copy s) } }, span: fld.new_span(mi.span)}; @@ -216,21 +216,21 @@ fn noop_fold_struct_field(&&sf: @struct_field, fld: ast_fold) fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ { return match i { item_const(t, e) => item_const(fld.fold_ty(t), fld.fold_expr(e)), - item_fn(decl, purity, typms, body) => { + item_fn(decl, purity, typms, ref body) => { item_fn(fold_fn_decl(decl, fld), purity, fold_ty_params(typms, fld), - fld.fold_block(body)) + fld.fold_block((*body))) } item_mod(m) => item_mod(fld.fold_mod(m)), item_foreign_mod(nm) => item_foreign_mod(fld.fold_foreign_mod(nm)), item_ty(t, typms) => item_ty(fld.fold_ty(t), fold_ty_params(typms, fld)), - item_enum(enum_definition, typms) => { + item_enum(ref enum_definition, typms) => { item_enum(ast::enum_def({ - variants: vec::map(enum_definition.variants, + variants: vec::map((*enum_definition).variants, |x| fld.fold_variant(*x)), - common: option::map(&enum_definition.common, + common: option::map(&(*enum_definition).common, |x| fold_struct_def(*x, fld)) }), fold_ty_params(typms, fld)) } @@ -244,8 +244,8 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ { fld.fold_ty(ty), vec::map(*methods, |x| fld.fold_method(*x))) } - item_trait(tps, traits, methods) => { - let methods = do methods.map |method| { + item_trait(tps, traits, ref methods) => { + let methods = do (*methods).map |method| { match *method { required(*) => copy *method, provided(method) => provided(fld.fold_method(method)) @@ -255,9 +255,9 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ { vec::map(traits, |p| fold_trait_ref(*p, fld)), move methods) } - item_mac(m) => { + item_mac(ref m) => { // FIXME #2888: we might actually want to do something here. 
- item_mac(m) + item_mac((*m)) } }; } @@ -320,7 +320,7 @@ fn noop_fold_stmt(s: stmt_, fld: ast_fold) -> stmt_ { stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)), stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)), stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid)), - stmt_mac(mac, semi) => stmt_mac(fold_mac(mac), semi) + stmt_mac(ref mac, semi) => stmt_mac(fold_mac((*mac)), semi) }; } @@ -409,8 +409,8 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ { } expr_repeat(expr, count, mutt) => expr_repeat(fld.fold_expr(expr), fld.fold_expr(count), mutt), - expr_rec(fields, maybe_expr) => { - expr_rec(vec::map(fields, |x| fold_field(*x)), + expr_rec(ref fields, maybe_expr) => { + expr_rec(vec::map((*fields), |x| fold_field(*x)), option::map(&maybe_expr, |x| fld.fold_expr(*x))) } expr_tup(elts) => expr_tup(vec::map(elts, |x| fld.fold_expr(*x))), @@ -435,35 +435,35 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ { expr_lit(_) => copy e, expr_cast(expr, ty) => expr_cast(fld.fold_expr(expr), ty), expr_addr_of(m, ohs) => expr_addr_of(m, fld.fold_expr(ohs)), - expr_if(cond, tr, fl) => { - expr_if(fld.fold_expr(cond), fld.fold_block(tr), + expr_if(cond, ref tr, fl) => { + expr_if(fld.fold_expr(cond), fld.fold_block((*tr)), option::map(&fl, |x| fld.fold_expr(*x))) } - expr_while(cond, body) => { - expr_while(fld.fold_expr(cond), fld.fold_block(body)) + expr_while(cond, ref body) => { + expr_while(fld.fold_expr(cond), fld.fold_block((*body))) } - expr_loop(body, opt_ident) => { - expr_loop(fld.fold_block(body), + expr_loop(ref body, opt_ident) => { + expr_loop(fld.fold_block((*body)), option::map(&opt_ident, |x| fld.fold_ident(*x))) } - expr_match(expr, arms) => { + expr_match(expr, ref arms) => { expr_match(fld.fold_expr(expr), - vec::map(arms, |x| fld.fold_arm(*x))) + vec::map((*arms), |x| fld.fold_arm(*x))) } - expr_fn(proto, decl, body, captures) => { + expr_fn(proto, decl, ref body, captures) => { expr_fn(proto, fold_fn_decl(decl, fld), - fld.fold_block(body), + fld.fold_block((*body)), @((*captures).map(|cap_item| { @({id: fld.new_id(cap_item.id), ..**cap_item})}))) } - expr_fn_block(decl, body, captures) => { - expr_fn_block(fold_fn_decl(decl, fld), fld.fold_block(body), + expr_fn_block(decl, ref body, captures) => { + expr_fn_block(fold_fn_decl(decl, fld), fld.fold_block((*body)), @((*captures).map(|cap_item| { @({id: fld.new_id(cap_item.id), ..**cap_item})}))) } - expr_block(blk) => expr_block(fld.fold_block(blk)), + expr_block(ref blk) => expr_block(fld.fold_block((*blk))), expr_copy(e) => expr_copy(fld.fold_expr(e)), expr_unary_move(e) => expr_unary_move(fld.fold_expr(e)), expr_assign(el, er) => { @@ -492,10 +492,10 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ { expr_log(i, lv, e) => expr_log(i, fld.fold_expr(lv), fld.fold_expr(e)), expr_assert(e) => expr_assert(fld.fold_expr(e)), - expr_mac(mac) => expr_mac(fold_mac(mac)), - expr_struct(path, fields, maybe_expr) => { + expr_mac(ref mac) => expr_mac(fold_mac((*mac))), + expr_struct(path, ref fields, maybe_expr) => { expr_struct(fld.fold_path(path), - vec::map(fields, |x| fold_field(*x)), + vec::map((*fields), |x| fold_field(*x)), option::map(&maybe_expr, |x| fld.fold_expr(*x))) }, expr_paren(ex) => expr_paren(fld.fold_expr(ex)) @@ -519,7 +519,7 @@ fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ { ty_vec(mt) => ty_vec(fold_mt(mt, fld)), ty_ptr(mt) => ty_ptr(fold_mt(mt, fld)), ty_rptr(region, mt) => ty_rptr(region, fold_mt(mt, fld)), - ty_rec(fields) => ty_rec(vec::map(fields, |f| 
fold_field(*f, fld))), + ty_rec(ref fields) => ty_rec(vec::map((*fields), |f| fold_field(*f, fld))), ty_fn(f) => ty_fn(@TyFn { proto: f.proto, @@ -533,7 +533,7 @@ fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ { ty_path(path, id) => ty_path(fld.fold_path(path), fld.new_id(id)), ty_fixed_length_vec(mt, vs) => ty_fixed_length_vec(fold_mt(mt, fld), vs), - ty_mac(mac) => ty_mac(fold_mac(mac)) + ty_mac(ref mac) => ty_mac(fold_mac((*mac))) } } @@ -579,10 +579,10 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ { }) } - enum_variant_kind(enum_definition) => { - let variants = vec::map(enum_definition.variants, + enum_variant_kind(ref enum_definition) => { + let variants = vec::map((*enum_definition).variants, |x| fld.fold_variant(*x)); - let common = option::map(&enum_definition.common, + let common = option::map(&(*enum_definition).common, |x| fold_struct_def(*x, fld)); kind = enum_variant_kind(ast::enum_def({ variants: variants, common: common })); diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 4c14f05d56b..a48e33c9405 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -199,9 +199,9 @@ impl Parser { while self.token != token::GT && self.token != token::BINOP(token::SHR) { match sep { - Some(t) => { + Some(ref t) => { if first { first = false; } - else { self.expect(t); } + else { self.expect((*t)); } } _ => () } @@ -243,9 +243,9 @@ impl Parser { let mut v: ~[T] = ~[]; while self.token != ket { match sep.sep { - Some(t) => { + Some(ref t) => { if first { first = false; } - else { self.expect(t); } + else { self.expect((*t)); } } _ => () } diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index a0cfece6b10..3763a74b9d2 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -406,9 +406,9 @@ fn scan_number(c: char, rdr: string_reader) -> token::Token { num_str += ~"." + dec_part; } match scan_exponent(rdr) { - Some(s) => { + Some(ref s) => { is_float = true; - num_str += s; + num_str += (*s); } None => () } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 7cf279d0d81..bebced8f38a 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -136,7 +136,7 @@ macro_rules! maybe_whole_expr ( macro_rules! maybe_whole ( ($p:expr, $constructor:ident) => ( match copy $p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; } + INTERPOLATED(token::$constructor(ref x)) => { $p.bump(); return (*x); } _ => () }) ; (deref $p:expr, $constructor:ident) => ( match copy $p.token { @@ -155,7 +155,7 @@ macro_rules! 
maybe_whole ( _ => () }) ; (pair_empty $p:expr, $constructor:ident) => ( match copy $p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return (~[], x); } + INTERPOLATED(token::$constructor(ref x)) => { $p.bump(); return (~[], (*x)); } _ => () }) @@ -166,7 +166,7 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>) -> ~[attribute] { match rhs { None => lhs, - Some(attrs) => vec::append(lhs, attrs) + Some(ref attrs) => vec::append(lhs, (*attrs)) } } @@ -510,9 +510,9 @@ impl Parser { let lo = self.span.lo; match self.maybe_parse_dollar_mac() { - Some(e) => { + Some(ref e) => { return @{id: self.get_id(), - node: ty_mac(spanned(lo, self.span.hi, e)), + node: ty_mac(spanned(lo, self.span.hi, (*e))), span: mk_sp(lo, self.span.hi)}; } None => () @@ -928,7 +928,7 @@ impl Parser { let mut ex: expr_; match self.maybe_parse_dollar_mac() { - Some(x) => return self.mk_mac_expr(lo, self.span.hi, x), + Some(ref x) => return self.mk_mac_expr(lo, self.span.hi, (*x)), _ => () } @@ -2022,7 +2022,7 @@ impl Parser { pat = pat_tup(fields); } } - tok => { + copy tok => { if !is_ident_or_path(tok) || self.is_keyword(~"true") || self.is_keyword(~"false") @@ -2284,7 +2284,7 @@ impl Parser { let mut item_attrs; match self.parse_outer_attrs_or_ext(first_item_attrs) { None => item_attrs = ~[], - Some(Left(attrs)) => item_attrs = attrs, + Some(Left(ref attrs)) => item_attrs = (*attrs), Some(Right(ext)) => { return @spanned(lo, ext.span.hi, stmt_expr(ext, self.get_id())); @@ -2346,8 +2346,8 @@ impl Parser { let lo = self.span.lo; let us = self.eat_keyword(~"unsafe"); self.expect(token::LBRACE); - let {inner, next} = maybe_parse_inner_attrs_and_next(self, - parse_attrs); + let {inner: move inner, next: move next} = + maybe_parse_inner_attrs_and_next(self, parse_attrs); let blk_check_mode = if us { unsafe_blk } else { default_blk }; return (inner, self.parse_block_tail_(lo, blk_check_mode, next)); } @@ -2372,7 +2372,9 @@ impl Parser { let mut stmts = ~[]; let mut expr = None; - let {attrs_remaining, view_items, items: items, _} = + let {attrs_remaining: move attrs_remaining, + view_items: move view_items, + items: items, _} = self.parse_items_and_view_items(first_item_attrs, IMPORTS_AND_ITEMS_ALLOWED, false); @@ -2408,7 +2410,7 @@ impl Parser { token::RBRACE => { expr = Some(e); } - t => { + copy t => { if classify::stmt_ends_with_semi(*stmt) { self.fatal( ~"expected `;` or `}` after \ @@ -2421,12 +2423,12 @@ impl Parser { } } - stmt_mac(m, _) => { + stmt_mac(ref m, _) => { // Statement macro; might be an expr match self.token { token::SEMI => { self.bump(); - stmts.push(@{node: stmt_mac(m, true), + stmts.push(@{node: stmt_mac((*m), true), ..*stmt}); } token::RBRACE => { @@ -2435,7 +2437,7 @@ impl Parser { expr = Some( self.mk_mac_expr(stmt.span.lo, stmt.span.hi, - m.node)); + (*m).node)); } _ => { stmts.push(stmt); } } @@ -2847,7 +2849,7 @@ impl Parser { fields = ~[]; while self.token != token::RBRACE { match self.parse_class_item() { - dtor_decl(blk, attrs, s) => { + dtor_decl(ref blk, ref attrs, s) => { match the_dtor { Some((_, _, s_first)) => { self.span_note(s, fmt!("Duplicate destructor \ @@ -2857,7 +2859,7 @@ impl Parser { declared here"); } None => { - the_dtor = Some((blk, attrs, s)); + the_dtor = Some(((*blk), (*attrs), s)); } } } @@ -3007,7 +3009,9 @@ impl Parser { fn parse_mod_items(term: token::Token, +first_item_attrs: ~[attribute]) -> _mod { // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items, items: starting_items, _} = 
+ let {attrs_remaining: move attrs_remaining, + view_items: move view_items, + items: starting_items, _} = self.parse_items_and_view_items(first_item_attrs, VIEW_ITEMS_AND_ITEMS_ALLOWED, true); @@ -3076,11 +3080,11 @@ impl Parser { // on the mod, then we'll go and suck in another file and merge // its contents match ::attr::first_attr_value_str_by_name(outer_attrs, ~"merge") { - Some(path) => { + Some(ref path) => { let prefix = Path( self.sess.cm.span_to_filename(copy self.span)); let prefix = prefix.dir_path(); - let path = Path(path); + let path = Path((*path)); let (new_mod_item, new_attrs) = self.eval_src_mod_from_path( prefix, path, ~[], id_span); @@ -3113,7 +3117,7 @@ impl Parser { let file_path = match ::attr::first_attr_value_str_by_name( outer_attrs, ~"path") { - Some(d) => d, + Some(ref d) => (*d), None => default_path }; @@ -3143,7 +3147,7 @@ impl Parser { fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str { match ::attr::first_attr_value_str_by_name(attrs, ~"path") { - Some(d) => d, + Some(ref d) => (*d), None => default } } @@ -3208,7 +3212,10 @@ impl Parser { +first_item_attrs: ~[attribute]) -> foreign_mod { // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items, items: _, foreign_items} = + let {attrs_remaining: move attrs_remaining, + view_items: move view_items, + items: _, + foreign_items: move foreign_items} = self.parse_items_and_view_items(first_item_attrs, VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED, true); @@ -3341,7 +3348,7 @@ impl Parser { let mut methods: ~[@method] = ~[]; while self.token != token::RBRACE { match self.parse_class_item() { - dtor_decl(blk, attrs, s) => { + dtor_decl(ref blk, ref attrs, s) => { match the_dtor { Some((_, _, s_first)) => { self.span_note(s, ~"duplicate destructor \ @@ -3351,7 +3358,7 @@ impl Parser { declared here"); } None => { - the_dtor = Some((blk, attrs, s)); + the_dtor = Some(((*blk), (*attrs), s)); } } } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 684c8414a01..a0aecd0375e 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -190,9 +190,9 @@ fn to_str(in: @ident_interner, t: Token) -> ~str { /* Other */ DOC_COMMENT(s) => *in.get(s), EOF => ~"<eof>", - INTERPOLATED(nt) => { + INTERPOLATED(ref nt) => { ~"an interpolated " + - match nt { + match (*nt) { nt_item(*) => ~"item", nt_block(*) => ~"block", nt_stmt(*) => ~"statement", diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index df0c1214361..547f0141d33 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -387,7 +387,7 @@ fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) { print_region(s, ~"&", region, ~"/"); print_mt(s, mt); } - ast::ty_rec(fields) => { + ast::ty_rec(ref fields) => { word(s.s, ~"{"); fn print_field(s: ps, f: ast::ty_field) { cbox(s, indent_unit); @@ -398,7 +398,7 @@ fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) { end(s); } fn get_span(f: ast::ty_field) -> codemap::span { return f.span; } - commasep_cmnt(s, consistent, fields, print_field, get_span); + commasep_cmnt(s, consistent, (*fields), print_field, get_span); word(s.s, ~",}"); } ast::ty_tup(elts) => { @@ -479,11 +479,11 @@ fn print_item(s: ps, &&item: @ast::item) { end(s); // end the outer cbox } - ast::item_fn(decl, purity, typarams, body) => { + ast::item_fn(decl, purity, typarams, ref body) => { print_fn(s, decl, Some(purity), item.ident, typarams, None, item.vis); word(s.s, ~" "); - 
print_block_with_attrs(s, body, item.attrs);
+        print_block_with_attrs(s, (*body), item.attrs);
       }
       ast::item_mod(_mod) => {
         head(s, visibility_qualified(item.vis, ~"mod"));
@@ -522,8 +522,8 @@ fn print_item(s: ps, &&item: @ast::item) {
         word(s.s, ~";");
         end(s); // end the outer ibox
       }
-      ast::item_enum(enum_definition, params) => {
-        print_enum_def(s, enum_definition, params, item.ident,
+      ast::item_enum(ref enum_definition, params) => {
+        print_enum_def(s, (*enum_definition), params, item.ident,
                        item.span, item.vis);
       }
       ast::item_class(struct_def, tps) => {
@@ -558,7 +558,7 @@ fn print_item(s: ps, &&item: @ast::item) {
             bclose(s, item.span);
         }
       }
-      ast::item_trait(tps, traits, methods) => {
+      ast::item_trait(tps, traits, ref methods) => {
        head(s, visibility_qualified(item.vis, ~"trait"));
        print_ident(s, item.ident);
        print_type_params(s, tps);
@@ -569,19 +569,19 @@ fn print_item(s: ps, &&item: @ast::item) {
        }
        word(s.s, ~" ");
        bopen(s);
-       for methods.each |meth| {
+       for (*methods).each |meth| {
          print_trait_method(s, *meth);
        }
        bclose(s, item.span);
      }
-      ast::item_mac({node: ast::mac_invoc_tt(pth, tts), _}) => {
+      ast::item_mac({node: ast::mac_invoc_tt(pth, ref tts), _}) => {
        print_visibility(s, item.vis);
        print_path(s, pth, false);
        word(s.s, ~"! ");
        print_ident(s, item.ident);
        cbox(s, indent_unit);
        popen(s);
-       for tts.each |tt| {
+       for (*tts).each |tt| {
          print_tt(s, *tt);
        }
        pclose(s);
@@ -744,23 +744,23 @@ fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param],
 /// expression arguments as expressions). It can be done! I think.
 fn print_tt(s: ps, tt: ast::token_tree) {
     match tt {
-      ast::tt_delim(tts) => for tts.each() |tt_elt| { print_tt(s, *tt_elt); },
-      ast::tt_tok(_, tk) => {
-        match tk {
+      ast::tt_delim(ref tts) => for (*tts).each() |tt_elt| { print_tt(s, *tt_elt); },
+      ast::tt_tok(_, ref tk) => {
+        match (*tk) {
          parse::token::IDENT(*) => { // don't let idents run together
            if s.s.token_tree_last_was_ident { word(s.s, ~" ") }
            s.s.token_tree_last_was_ident = true;
          }
          _ => { s.s.token_tree_last_was_ident = false; }
        }
-        word(s.s, parse::token::to_str(s.intr, tk));
+        word(s.s, parse::token::to_str(s.intr, (*tk)));
      }
-      ast::tt_seq(_, tts, sep, zerok) => {
+      ast::tt_seq(_, ref tts, ref sep, zerok) => {
        word(s.s, ~"$(");
-       for tts.each() |tt_elt| { print_tt(s, *tt_elt); }
+       for (*tts).each() |tt_elt| { print_tt(s, *tt_elt); }
        word(s.s, ~")");
-       match sep {
-         Some(tk) => word(s.s, parse::token::to_str(s.intr, tk)),
+       match (*sep) {
+         Some(ref tk) => word(s.s, parse::token::to_str(s.intr, (*tk))),
         None => ()
       }
       word(s.s, if zerok { ~"*" } else { ~"+" });
@@ -792,8 +792,8 @@ fn print_variant(s: ps, v: ast::variant) {
         head(s, ~"");
         print_struct(s, struct_def, ~[], v.node.name, v.span);
       }
-      ast::enum_variant_kind(enum_definition) => {
-        print_variants(s, enum_definition.variants, v.span);
+      ast::enum_variant_kind(ref enum_definition) => {
+        print_variants(s, (*enum_definition).variants, v.span);
       }
     }
     match v.node.disr_expr {
@@ -818,7 +818,7 @@ fn print_ty_method(s: ps, m: ast::ty_method) {

 fn print_trait_method(s: ps, m: ast::trait_method) {
     match m {
-      required(ty_m) => print_ty_method(s, ty_m),
+      required(ref ty_m) => print_ty_method(s, (*ty_m)),
       provided(m) => print_method(s, m)
     }
 }
@@ -892,9 +892,9 @@ fn print_stmt(s: ps, st: ast::stmt) {
         print_expr(s, expr);
         word(s.s, ~";");
       }
-      ast::stmt_mac(mac, semi) => {
+      ast::stmt_mac(ref mac, semi) => {
         space_if_not_bol(s);
-        print_mac(s, mac);
+        print_mac(s, (*mac));
         if semi { word(s.s, ~";"); }
       }
     }
@@ -974,21 +974,21 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
       Some(_else) => {
         match _else.node {
           // "another else-if"
-          ast::expr_if(i, t, e) => {
+          ast::expr_if(i, ref t, e) => {
             cbox(s, indent_unit - 1u);
             ibox(s, 0u);
             word(s.s, ~" else if ");
             print_expr(s, i);
             space(s.s);
-            print_block(s, t);
+            print_block(s, (*t));
             do_else(s, e);
           }
           // "final else"
-          ast::expr_block(b) => {
+          ast::expr_block(ref b) => {
             cbox(s, indent_unit - 1u);
             ibox(s, 0u);
             word(s.s, ~" else ");
-            print_block(s, b);
+            print_block(s, (*b));
           }
           // BLEAH, constraints would be great here
           _ => {
@@ -1014,11 +1014,11 @@ fn print_mac(s: ps, m: ast::mac) {
         arg.iter(|a| print_expr(s, *a));
         // FIXME: extension 'body' (#2339)
       }
-      ast::mac_invoc_tt(pth, tts) => {
+      ast::mac_invoc_tt(pth, ref tts) => {
        print_path(s, pth, false);
        word(s.s, ~"!");
        popen(s);
-       for tts.each() |tt| { print_tt(s, *tt); }
+       for (*tts).each() |tt| { print_tt(s, *tt); }
        pclose(s);
      }
      ast::mac_ellipsis => word(s.s, ~"..."),
@@ -1149,9 +1149,9 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         end(s);
       }
-      ast::expr_rec(fields, wth) => {
+      ast::expr_rec(ref fields, wth) => {
         word(s.s, ~"{");
-        commasep_cmnt(s, consistent, fields, print_field, get_span);
+        commasep_cmnt(s, consistent, (*fields), print_field, get_span);
         match wth {
           Some(expr) => {
             ibox(s, indent_unit);
@@ -1165,13 +1165,13 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         }
         word(s.s, ~"}");
       }
-      ast::expr_struct(path, fields, wth) => {
+      ast::expr_struct(path, ref fields, wth) => {
         print_path(s, path, true);
         word(s.s, ~"{");
-        commasep_cmnt(s, consistent, fields, print_field, get_span);
+        commasep_cmnt(s, consistent, (*fields), print_field, get_span);
         match wth {
           Some(expr) => {
-            if vec::len(fields) > 0u { space(s.s); }
+            if vec::len((*fields)) > 0u { space(s.s); }
             ibox(s, indent_unit);
             word(s.s, ~",");
             space(s.s);
@@ -1229,33 +1229,33 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         word_space(s, ~"as");
         print_type_ex(s, ty, true);
       }
-      ast::expr_if(test, blk, elseopt) => {
-        print_if(s, test, blk, elseopt, false);
+      ast::expr_if(test, ref blk, elseopt) => {
+        print_if(s, test, (*blk), elseopt, false);
       }
-      ast::expr_while(test, blk) => {
+      ast::expr_while(test, ref blk) => {
         head(s, ~"while");
         print_expr(s, test);
         space(s.s);
-        print_block(s, blk);
+        print_block(s, (*blk));
       }
-      ast::expr_loop(blk, opt_ident) => {
+      ast::expr_loop(ref blk, opt_ident) => {
         head(s, ~"loop");
         space(s.s);
         opt_ident.iter(|ident| {
             print_ident(s, *ident);
             word_space(s, ~":");
         });
-        print_block(s, blk);
+        print_block(s, (*blk));
      }
-      ast::expr_match(expr, arms) => {
+      ast::expr_match(expr, ref arms) => {
        cbox(s, alt_indent_unit);
        ibox(s, 4);
        word_nbsp(s, ~"match");
        print_expr(s, expr);
        space(s.s);
        bopen(s);
-       let len = arms.len();
-       for arms.eachi |i, arm| {
+       let len = (*arms).len();
+       for (*arms).eachi |i, arm| {
         space(s.s);
         cbox(s, alt_indent_unit);
         ibox(s, 0u);
@@ -1287,10 +1287,10 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
             match arm.body.node.expr {
               Some(expr) => {
                 match expr.node {
-                  ast::expr_block(blk) => {
+                  ast::expr_block(ref blk) => {
                     // the block will close the pattern's ibox
                     print_block_unclosed_indent(
-                        s, blk, alt_indent_unit);
+                        s, (*blk), alt_indent_unit);
                   }
                   _ => {
                     end(s); // close the ibox for the pattern
@@ -1312,7 +1312,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         }
         bclose_(s, expr.span, alt_indent_unit);
       }
-      ast::expr_fn(proto, decl, body, cap_clause) => {
+      ast::expr_fn(proto, decl, ref body, cap_clause) => {
        // containing cbox, will be closed by print-block at }
        cbox(s, indent_unit);
        // head-box, will be closed by print-block at start
@@ -1321,9 +1321,9 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
                  Some(proto), ast::inherited);
        print_fn_args_and_ret(s, decl, *cap_clause, None);
        space(s.s);
-       print_block(s, body);
+       print_block(s, (*body));
      }
-      ast::expr_fn_block(decl, body, cap_clause) => {
+      ast::expr_fn_block(decl, ref body, cap_clause) => {
        // in do/for blocks we don't want to show an empty
        // argument list, but at this point we don't know which
        // we are inside.
@@ -1332,16 +1332,16 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
        print_fn_block_args(s, decl, *cap_clause);
        space(s.s);
        // }
-       assert body.node.stmts.is_empty();
-       assert body.node.expr.is_some();
+       assert (*body).node.stmts.is_empty();
+       assert (*body).node.expr.is_some();
        // we extract the block, so as not to create another set of boxes
-       match body.node.expr.get().node {
-         ast::expr_block(blk) => {
-           print_block_unclosed(s, blk);
+       match (*body).node.expr.get().node {
+         ast::expr_block(ref blk) => {
+           print_block_unclosed(s, (*blk));
         }
         _ => {
           // this is a bare expression
-          print_expr(s, body.node.expr.get());
+          print_expr(s, (*body).node.expr.get());
           end(s); // need to close a box
         }
       }
@@ -1356,12 +1356,12 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
      ast::expr_do_body(body) => {
        print_expr(s, body);
      }
-      ast::expr_block(blk) => {
+      ast::expr_block(ref blk) => {
        // containing cbox, will be closed by print-block at }
        cbox(s, indent_unit);
        // head-box, will be closed by print-block after {
        ibox(s, 0u);
-       print_block(s, blk);
+       print_block(s, (*blk));
      }
      ast::expr_copy(e) => { word_space(s, ~"copy"); print_expr(s, e); }
      ast::expr_unary_move(e) => {
@@ -1447,7 +1447,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
        word_nbsp(s, ~"assert");
        print_expr(s, expr);
      }
-      ast::expr_mac(m) => print_mac(s, m),
+      ast::expr_mac(ref m) => print_mac(s, (*m)),
      ast::expr_paren(e) => {
        popen(s);
        print_expr(s, e);
@@ -1768,14 +1768,14 @@ fn print_type_params(s: ps, &&params: ~[ast::ty_param]) {
 fn print_meta_item(s: ps, &&item: @ast::meta_item) {
     ibox(s, indent_unit);
     match item.node {
-      ast::meta_word(name) => word(s.s, name),
-      ast::meta_name_value(name, value) => {
-        word_space(s, name);
+      ast::meta_word(ref name) => word(s.s, (*name)),
+      ast::meta_name_value(ref name, value) => {
+        word_space(s, (*name));
         word_space(s, ~"=");
         print_literal(s, @value);
       }
-      ast::meta_list(name, items) => {
-        word(s.s, name);
+      ast::meta_list(ref name, items) => {
+        word(s.s, (*name));
         popen(s);
         commasep(s, consistent, items, print_meta_item);
         pclose(s);
@@ -1803,10 +1803,10 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) {
         word(s.s, ~"::*");
       }
-      ast::view_path_list(path, idents, _) => {
+      ast::view_path_list(path, ref idents, _) => {
         print_path(s, path, false);
         word(s.s, ~"::{");
-        do commasep(s, inconsistent, idents) |s, w| {
+        do commasep(s, inconsistent, (*idents)) |s, w| {
             print_ident(s, w.node.name);
         }
         word(s.s, ~"}");
@@ -1948,15 +1948,15 @@ fn maybe_print_trailing_comment(s: ps, span: codemap::span,
     let mut cm;
     match s.cm { Some(ccm) => cm = ccm, _ => return }
     match next_comment(s) {
-      Some(cmnt) => {
-        if cmnt.style != comments::trailing { return; }
+      Some(ref cmnt) => {
+        if (*cmnt).style != comments::trailing { return; }
         let span_line = cm.lookup_char_pos(span.hi);
-        let comment_line = cm.lookup_char_pos(cmnt.pos);
-        let mut next = cmnt.pos + BytePos(1u);
+        let comment_line = cm.lookup_char_pos((*cmnt).pos);
+        let mut next = (*cmnt).pos + BytePos(1u);
         match next_pos { None => (), Some(p) => next = p }
-        if span.hi < cmnt.pos && cmnt.pos < next &&
+        if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
               span_line.line == comment_line.line {
-            print_comment(s, cmnt);
+            print_comment(s, (*cmnt));
             s.cur_cmnt += 1u;
         }
       }
@@ -1970,7 +1970,7 @@ fn print_remaining_comments(s: ps) {
     if next_comment(s).is_none() { hardbreak(s.s); }
     loop {
         match next_comment(s) {
-          Some(cmnt) => { print_comment(s, cmnt); s.cur_cmnt += 1u; }
+          Some(ref cmnt) => { print_comment(s, (*cmnt)); s.cur_cmnt += 1u; }
          _ => break
        }
    }
@@ -1979,8 +1979,8 @@ fn print_literal(s: ps, &&lit: @ast::lit) {
     maybe_print_comment(s, lit.span.lo);
     match next_lit(s, lit.span.lo) {
-      Some(ltrl) => {
-        word(s.s, ltrl.lit);
+      Some(ref ltrl) => {
+        word(s.s, (*ltrl).lit);
         return;
       }
       _ => ()
@@ -2030,9 +2030,9 @@ fn lit_to_str(l: @ast::lit) -> ~str {

 fn next_lit(s: ps, pos: BytePos) -> Option<comments::lit> {
     match s.literals {
-      Some(lits) => {
-        while s.cur_lit < vec::len(lits) {
-            let ltrl = lits[s.cur_lit];
+      Some(ref lits) => {
+        while s.cur_lit < vec::len((*lits)) {
+            let ltrl = (*lits)[s.cur_lit];
             if ltrl.pos > pos { return None; }
             s.cur_lit += 1u;
             if ltrl.pos == pos { return Some(ltrl); }
@@ -2046,9 +2046,9 @@ fn next_lit(s: ps, pos: BytePos) -> Option<comments::lit> {
 fn maybe_print_comment(s: ps, pos: BytePos) {
     loop {
         match next_comment(s) {
-          Some(cmnt) => {
-            if cmnt.pos < pos {
-                print_comment(s, cmnt);
+          Some(ref cmnt) => {
+            if (*cmnt).pos < pos {
+                print_comment(s, (*cmnt));
                 s.cur_cmnt += 1u;
             } else { break; }
           }
@@ -2117,9 +2117,9 @@ fn to_str<T>(t: T, f: fn@(ps, T), intr: @ident_interner) -> ~str {

 fn next_comment(s: ps) -> Option<comments::cmnt> {
     match s.comments {
-      Some(cmnts) => {
-        if s.cur_cmnt < vec::len(cmnts) {
-            return Some(cmnts[s.cur_cmnt]);
+      Some(ref cmnts) => {
+        if s.cur_cmnt < vec::len((*cmnts)) {
+            return Some((*cmnts)[s.cur_cmnt]);
         } else { return None::<comments::cmnt>; }
       }
       _ => return None::<comments::cmnt>
diff --git a/src/libsyntax/syntax.rc b/src/libsyntax/syntax.rc
index 55bac5f8275..66052767bd4 100644
--- a/src/libsyntax/syntax.rc
+++ b/src/libsyntax/syntax.rc
@@ -24,7 +24,7 @@
 #[allow(vecs_implicitly_copyable)];
 #[allow(non_camel_case_types)];
 #[allow(deprecated_mode)];
-#[allow(deprecated_pattern)];
+#[warn(deprecated_pattern)];

 extern mod core(vers = "0.5");
 extern mod std(vers = "0.5");
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 6ca735469a6..4a399c5a0dd 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -125,10 +125,10 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
         (v.visit_ty)(t, e, v);
         (v.visit_expr)(ex, e, v);
       }
-      item_fn(decl, purity, tp, body) => {
+      item_fn(decl, purity, tp, ref body) => {
         (v.visit_fn)(fk_item_fn(/* FIXME (#2543) */ copy i.ident,
                                 /* FIXME (#2543) */ copy tp,
-                                purity), decl, body,
+                                purity), decl, (*body),
                     i.span, i.id, e, v);
       }
       item_mod(m) => (v.visit_mod)(m, i.span, i.id, e, v),
@@ -140,9 +140,9 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
         (v.visit_ty)(t, e, v);
         (v.visit_ty_params)(tps, e, v);
       }
-      item_enum(enum_definition, tps) => {
+      item_enum(ref enum_definition, tps) => {
         (v.visit_ty_params)(tps, e, v);
-        visit_enum_def(enum_definition, tps, e, v);
+        visit_enum_def((*enum_definition), tps, e, v);
       }
       item_impl(tps, traits, ty, methods) => {
         (v.visit_ty_params)(tps, e, v);
@@ -158,14 +158,14 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
         (v.visit_ty_params)(tps, e, v);
         (v.visit_struct_def)(struct_def, i.ident, tps, i.id, e, v);
       }
-      item_trait(tps, traits, methods) => {
+      item_trait(tps, traits, ref methods) => {
         (v.visit_ty_params)(tps, e, v);
         for traits.each |p| { visit_path(p.path, e, v); }
-        for methods.each |m| {
+        for (*methods).each |m| {
            (v.visit_trait_method)(*m, e, v);
        }
      }
-      item_mac(m) => visit_mac(m, e, v)
+      item_mac(ref m) => visit_mac((*m), e, v)
    }
 }
@@ -180,8 +180,8 @@ fn visit_enum_def<E>(enum_definition: ast::enum_def, tps: ~[ast::ty_param],
             (v.visit_struct_def)(struct_def, vr.node.name, tps,
                                  vr.node.id, e, v);
         }
-        enum_variant_kind(enum_definition) => {
-            visit_enum_def(enum_definition, tps, e, v);
+        enum_variant_kind(ref enum_definition) => {
+            visit_enum_def((*enum_definition), tps, e, v);
         }
     }
     // Visit the disr expr if it exists
@@ -197,7 +197,7 @@ fn visit_ty<E>(t: @Ty, e: E, v: vt<E>) {
       ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => {
         (v.visit_ty)(mt.ty, e, v);
       }
-      ty_rec(flds) => for flds.each |f| {
+      ty_rec(ref flds) => for (*flds).each |f| {
         (v.visit_ty)(f.node.mt.ty, e, v);
       },
       ty_tup(ts) => for ts.each |tt| {
@@ -320,7 +320,7 @@ fn visit_ty_method<E>(m: ty_method, e: E, v: vt<E>) {

 fn visit_trait_method<E>(m: trait_method, e: E, v: vt<E>) {
     match m {
-      required(ty_m) => (v.visit_ty_method)(ty_m, e, v),
+      required(ref ty_m) => (v.visit_ty_method)((*ty_m), e, v),
       provided(m) => visit_method_helper(m, e, v)
     }
 }
@@ -364,7 +364,7 @@ fn visit_stmt<E>(s: @stmt, e: E, v: vt<E>) {
       stmt_decl(d, _) => (v.visit_decl)(d, e, v),
       stmt_expr(ex, _) => (v.visit_expr)(ex, e, v),
       stmt_semi(ex, _) => (v.visit_expr)(ex, e, v),
-      stmt_mac(mac, _) => visit_mac(mac, e, v)
+      stmt_mac(ref mac, _) => visit_mac((*mac), e, v)
     }
 }
@@ -404,13 +404,13 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
         (v.visit_expr)(element, e, v);
         (v.visit_expr)(count, e, v);
       }
-      expr_rec(flds, base) => {
-        for flds.each |f| { (v.visit_expr)(f.node.expr, e, v); }
+      expr_rec(ref flds, base) => {
+        for (*flds).each |f| { (v.visit_expr)(f.node.expr, e, v); }
         visit_expr_opt(base, e, v);
       }
-      expr_struct(p, flds, base) => {
+      expr_struct(p, ref flds, base) => {
         visit_path(p, e, v);
-        for flds.each |f| { (v.visit_expr)(f.node.expr, e, v); }
+        for (*flds).each |f| { (v.visit_expr)(f.node.expr, e, v); }
         visit_expr_opt(base, e, v);
       }
       expr_tup(elts) => for elts.each |el| { (v.visit_expr)(*el, e, v); },
@@ -431,29 +431,29 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
       expr_assert(x) => (v.visit_expr)(x, e, v),
       expr_lit(_) => (),
       expr_cast(x, t) => { (v.visit_expr)(x, e, v); (v.visit_ty)(t, e, v); }
-      expr_if(x, b, eo) => {
+      expr_if(x, ref b, eo) => {
         (v.visit_expr)(x, e, v);
-        (v.visit_block)(b, e, v);
+        (v.visit_block)((*b), e, v);
         visit_expr_opt(eo, e, v);
       }
-      expr_while(x, b) => {
+      expr_while(x, ref b) => {
         (v.visit_expr)(x, e, v);
-        (v.visit_block)(b, e, v);
+        (v.visit_block)((*b), e, v);
       }
-      expr_loop(b, _) => (v.visit_block)(b, e, v),
-      expr_match(x, arms) => {
+      expr_loop(ref b, _) => (v.visit_block)((*b), e, v),
+      expr_match(x, ref arms) => {
         (v.visit_expr)(x, e, v);
-        for arms.each |a| { (v.visit_arm)(*a, e, v); }
+        for (*arms).each |a| { (v.visit_arm)(*a, e, v); }
       }
-      expr_fn(proto, decl, body, cap_clause) => {
-        (v.visit_fn)(fk_anon(proto, cap_clause), decl, body,
+      expr_fn(proto, decl, ref body, cap_clause) => {
+        (v.visit_fn)(fk_anon(proto, cap_clause), decl, (*body),
                      ex.span, ex.id, e, v);
       }
-      expr_fn_block(decl, body, cap_clause) => {
-        (v.visit_fn)(fk_fn_block(cap_clause), decl, body,
+      expr_fn_block(decl, ref body, cap_clause) => {
+        (v.visit_fn)(fk_fn_block(cap_clause), decl, (*body),
                      ex.span, ex.id, e, v);
       }
-      expr_block(b) => (v.visit_block)(b, e, v),
+      expr_block(ref b) => (v.visit_block)((*b), e, v),
       expr_assign(a, b) => {
         (v.visit_expr)(b, e, v);
         (v.visit_expr)(a, e, v);
@@ -482,7 +482,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
         (v.visit_expr)(lv, e, v);
         (v.visit_expr)(x, e, v);
       }
-      expr_mac(mac) => visit_mac(mac, e, v),
+      expr_mac(ref mac) => visit_mac((*mac), e, v),
       expr_paren(x) => (v.visit_expr)(x, e, v),
     }
     (v.visit_expr_post)(ex, e, v);
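The change repeated throughout this patch is the move from legacy by-value pattern bindings (`Some(v)`, `expr_block(blk)`) to explicit by-reference bindings (`Some(ref v)`, `expr_block(ref blk)`) with dereferences at the use sites, so that matching borrows the payload instead of copying or moving it out; syntax.rc now warns on the old form via `deprecated_pattern`. A minimal sketch of the same idiom in modern Rust syntax (the `Meta` enum and `describe` function below are invented stand-ins for illustration, not code from this tree):

```rust
// Illustrative stand-in for an AST node such as ast::meta_item; not real compiler code.
enum Meta {
    Word(String),
    NameValue(String, String),
}

// Binding with `ref` borrows the variant's fields rather than moving them out,
// which is the binding style this commit migrates librustc and libsyntax to.
fn describe(meta: &Meta) -> String {
    match *meta {
        Meta::Word(ref name) => format!("word: {}", name),
        Meta::NameValue(ref name, ref value) => format!("{} = {}", name, value),
    }
}

fn main() {
    let m = Meta::NameValue("vers".to_string(), "0.5".to_string());
    // `m` is only borrowed by `describe`, so it remains usable afterwards.
    println!("{}", describe(&m));
    println!("{}", describe(&m));
}
```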
