Diffstat (limited to 'src/libsyntax')
29 files changed, 361 insertions, 354 deletions
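
Every hunk below makes the same mechanical change: match arms that used to bind an enum's payload by value (implicitly copying or moving it) are rewritten to bind with `ref` and dereference explicitly, e.g. `Some(value) => Some(value)` becomes `Some(ref value) => Some((*value))`. A minimal sketch of that idiom, written in current Rust syntax rather than the 2012-era dialect shown in the diff; the enum and function names are illustrative only and do not come from libsyntax:

    // Illustrative types, not from the diff.
    enum Name {
        Word(String),
        Pair(String, String),
    }

    // Binding with `ref` borrows the payload in place, so the arm no longer
    // needs to copy or move it out of the borrowed value -- the same shape of
    // change the commit applies throughout src/libsyntax.
    fn name_len(n: &Name) -> usize {
        match *n {
            // By-value form would be `Name::Word(w) => w.len()`, which relies
            // on taking the payload out of `*n`.
            Name::Word(ref w) => (*w).len(),
            Name::Pair(ref a, ref b) => (*a).len() + (*b).len(),
        }
    }

    fn main() {
        let n = Name::Word("expl".to_string());
        assert_eq!(name_len(&n), 4);
    }
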
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 46fd7be656e..1e7e427a384 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -555,9 +555,9 @@ impl<T: to_bytes::IterBytes> inferable<T> : to_bytes::IterBytes { impl<T:cmp::Eq> inferable<T> : cmp::Eq { pure fn eq(&self, other: &inferable<T>) -> bool { match (*self) { - expl(e0a) => { + expl(ref e0a) => { match (*other) { - expl(e0b) => e0a == e0b, + expl(ref e0b) => (*e0a) == (*e0b), _ => false } } diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index d13ec70a975..d04447e8d73 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -163,8 +163,8 @@ fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, cx.local_id += 1u; } match fk { - visit::fk_dtor(tps, attrs, self_id, parent_id) => { - let dt = @{node: {id: id, attrs: attrs, self_id: self_id, + visit::fk_dtor(tps, ref attrs, self_id, parent_id) => { + let dt = @{node: {id: id, attrs: (*attrs), self_id: self_id, body: /* FIXME (#2543) */ copy body}, span: sp}; cx.map.insert(id, node_dtor(/* FIXME (#2543) */ copy tps, dt, parent_id, @@ -219,8 +219,8 @@ fn map_item(i: @item, cx: ctx, v: vt) { map_method(impl_did, extend(cx, i.ident), *m, cx); } } - item_enum(enum_definition, _) => { - for enum_definition.variants.each |v| { + item_enum(ref enum_definition, _) => { + for (*enum_definition).variants.each |v| { cx.map.insert(v.node.id, node_variant( /* FIXME (#2543) */ copy *v, i, extend(cx, i.ident))); @@ -228,7 +228,7 @@ fn map_item(i: @item, cx: ctx, v: vt) { } item_foreign_mod(nm) => { let abi = match attr::foreign_abi(i.attrs) { - either::Left(msg) => cx.diag.span_fatal(i.span, msg), + either::Left(ref msg) => cx.diag.span_fatal(i.span, (*msg)), either::Right(abi) => abi }; for nm.items.each |nitem| { @@ -249,7 +249,7 @@ fn map_item(i: @item, cx: ctx, v: vt) { map_struct_def(struct_def, node_item(i, item_path), i.ident, i.id, cx, v); } - item_trait(_, traits, methods) => { + item_trait(_, traits, ref methods) => { // Map trait refs to their parent classes. 
This is // so we can find the self_ty for traits.each |p| { @@ -258,7 +258,7 @@ fn map_item(i: @item, cx: ctx, v: vt) { // encoding/decoding cx.map.insert(p.impl_id, node_item(i, item_path)); } - for methods.each |tm| { + for (*methods).each |tm| { let id = ast_util::trait_method_to_ty_method(*tm).id; let d_id = ast_util::local_def(i.id); cx.map.insert(id, node_trait_method(@*tm, d_id, item_path)); @@ -368,9 +368,9 @@ fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { fmt!("method %s in %s (id=%?)", *itr.get(m.ident), path_to_str(*path, itr), id) } - Some(node_variant(variant, _, path)) => { + Some(node_variant(ref variant, _, path)) => { fmt!("variant %s in %s (id=%?)", - *itr.get(variant.node.name), path_to_str(*path, itr), id) + *itr.get((*variant).node.name), path_to_str(*path, itr), id) } Some(node_expr(expr)) => { fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id) diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index c5a45248e8e..7365ad12ba9 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -205,8 +205,8 @@ fn is_exported(i: ident, m: _mod) -> bool { for m.items.each |it| { if it.ident == i { local = true; } match it.node { - item_enum(enum_definition, _) => - for enum_definition.variants.each |v| { + item_enum(ref enum_definition, _) => + for (*enum_definition).variants.each |v| { if v.node.name == i { local = true; parent_enum = Some(/* FIXME (#2543) */ copy it.ident); @@ -233,10 +233,10 @@ fn is_exported(i: ident, m: _mod) -> bool { } } - ast::view_path_list(path, ids, _) => { + ast::view_path_list(path, ref ids, _) => { if vec::len(path.idents) == 1u { if i == path.idents[0] { return true; } - for ids.each |id| { + for (*ids).each |id| { if id.node.name == i { return true; } } } else { @@ -314,7 +314,7 @@ fn public_methods(ms: ~[@method]) -> ~[@method] { // a default, pull out the useful fields to make a ty_method fn trait_method_to_ty_method(method: trait_method) -> ty_method { match method { - required(m) => m, + required(ref m) => (*m), provided(m) => { {ident: m.ident, attrs: m.attrs, purity: m.purity, decl: m.decl, @@ -329,7 +329,7 @@ fn split_trait_methods(trait_methods: ~[trait_method]) let mut reqd = ~[], provd = ~[]; for trait_methods.each |trt_method| { match *trt_method { - required(tm) => reqd.push(tm), + required(ref tm) => reqd.push((*tm)), provided(m) => provd.push(m) } }; @@ -364,7 +364,7 @@ impl inlined_item: inlined_item_utils { ii_item(i) => i.id, ii_foreign(i) => i.id, ii_method(_, m) => m.id, - ii_dtor(dtor, _, _, _) => dtor.node.id + ii_dtor(ref dtor, _, _, _) => (*dtor).node.id } } @@ -373,8 +373,8 @@ impl inlined_item: inlined_item_utils { ii_item(i) => (v.visit_item)(i, e, v), ii_foreign(i) => (v.visit_foreign_item)(i, e, v), ii_method(_, m) => visit::visit_method_helper(m, e, v), - ii_dtor(dtor, _, tps, parent_id) => { - visit::visit_class_dtor_helper(dtor, tps, parent_id, e, v); + ii_dtor(ref dtor, _, tps, parent_id) => { + visit::visit_class_dtor_helper((*dtor), tps, parent_id, e, v); } } } @@ -453,8 +453,8 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { visit_item: fn@(i: @item) { vfn(i.id); match i.node { - item_enum(enum_definition, _) => - for enum_definition.variants.each |v| { vfn(v.node.id); }, + item_enum(ref enum_definition, _) => + for (*enum_definition).variants.each |v| { vfn(v.node.id); }, _ => () } }, @@ -643,7 +643,7 @@ impl Privacy : cmp::Eq { fn has_legacy_export_attr(attrs: &[attribute]) -> bool { for attrs.each |attribute| { match 
attribute.node.value.node { - meta_word(w) if w == ~"legacy_exports" => { + meta_word(ref w) if (*w) == ~"legacy_exports" => { return true; } _ => {} diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 1c7171ce787..79f78079784 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -124,9 +124,9 @@ fn get_attr_name(attr: ast::attribute) -> ~str { fn get_meta_item_name(meta: @ast::meta_item) -> ~str { match meta.node { - ast::meta_word(n) => n, - ast::meta_name_value(n, _) => n, - ast::meta_list(n, _) => n + ast::meta_word(ref n) => (*n), + ast::meta_name_value(ref n, _) => (*n), + ast::meta_list(ref n, _) => (*n) } } @@ -158,9 +158,9 @@ fn get_meta_item_list(meta: @ast::meta_item) -> Option<~[@ast::meta_item]> { */ fn get_name_value_str_pair(item: @ast::meta_item) -> Option<(~str, ~str)> { match attr::get_meta_item_value_str(item) { - Some(value) => { + Some(ref value) => { let name = attr::get_meta_item_name(item); - Some((name, value)) + Some((name, (*value))) } None => None } @@ -206,12 +206,12 @@ fn contains(haystack: ~[@ast::meta_item], needle: @ast::meta_item) -> bool { fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool { return match a.node { - ast::meta_word(na) => match b.node { - ast::meta_word(nb) => na == nb, + ast::meta_word(ref na) => match b.node { + ast::meta_word(ref nb) => (*na) == (*nb), _ => false }, - ast::meta_name_value(na, va) => match b.node { - ast::meta_name_value(nb, vb) => na == nb && va.node == vb.node, + ast::meta_name_value(ref na, va) => match b.node { + ast::meta_name_value(ref nb, vb) => (*na) == (*nb) && va.node == vb.node, _ => false }, ast::meta_list(*) => { @@ -256,7 +256,7 @@ fn last_meta_item_value_str_by_name(items: ~[@ast::meta_item], name: ~str) match last_meta_item_by_name(items, name) { Some(item) => match attr::get_meta_item_value_str(item) { - Some(value) => Some(value), + Some(ref value) => Some((*value)), None => None }, None => None @@ -281,9 +281,9 @@ fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] { pure fn lteq(ma: &@ast::meta_item, mb: &@ast::meta_item) -> bool { pure fn key(m: &ast::meta_item) -> ~str { match m.node { - ast::meta_word(name) => name, - ast::meta_name_value(name, _) => name, - ast::meta_list(name, _) => name + ast::meta_word(ref name) => (*name), + ast::meta_name_value(ref name, _) => (*name), + ast::meta_list(ref name, _) => (*name) } } key(*ma) <= key(*mb) @@ -334,8 +334,8 @@ fn foreign_abi(attrs: ~[ast::attribute]) -> Either<~str, ast::foreign_abi> { option::Some(~"stdcall") => { either::Right(ast::foreign_abi_stdcall) } - option::Some(t) => { - either::Left(~"unsupported abi: " + t) + option::Some(ref t) => { + either::Left(~"unsupported abi: " + (*t)) } }; } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index ffc786274b7..5218a753ae0 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -308,10 +308,10 @@ pub impl CodeMap { self.lookup_char_pos_adj( sp.lo + (pos - loc.file.start_pos)) } - FssExternal(eloc) => { - {filename: /* FIXME (#2543) */ copy eloc.filename, - line: eloc.line + loc.line - 1u, - col: if loc.line == 1u {eloc.col + loc.col} else {loc.col}, + FssExternal(ref eloc) => { + {filename: /* FIXME (#2543) */ copy (*eloc).filename, + line: (*eloc).line + loc.line - 1u, + col: if loc.line == 1u {(*eloc).col + loc.col} else {loc.col}, file: None} } } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 6aaad755b8b..e42bb00c212 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs 
@@ -285,7 +285,7 @@ fn print_macro_backtrace(cm: @codemap::CodeMap, sp: span) { fn expect<T: Copy>(diag: span_handler, opt: Option<T>, msg: fn() -> ~str) -> T { match opt { - Some(t) => t, + Some(ref t) => (*t), None => diag.handler().bug(msg()) } } diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs index 1242d528194..a42a51fd302 100644 --- a/src/libsyntax/ext/auto_serialize.rs +++ b/src/libsyntax/ext/auto_serialize.rs @@ -120,12 +120,12 @@ fn expand_auto_serialize( do vec::flat_map(in_items) |item| { if item.attrs.any(is_auto_serialize) { match item.node { - ast::item_ty(@{node: ast::ty_rec(fields), _}, tps) => { + ast::item_ty(@{node: ast::ty_rec(ref fields), _}, tps) => { let ser_impl = mk_rec_ser_impl( cx, item.span, item.ident, - fields, + (*fields), tps ); @@ -142,12 +142,12 @@ fn expand_auto_serialize( ~[filter_attrs(*item), ser_impl] }, - ast::item_enum(enum_def, tps) => { + ast::item_enum(ref enum_def, tps) => { let ser_impl = mk_enum_ser_impl( cx, item.span, item.ident, - enum_def, + (*enum_def), tps ); @@ -184,12 +184,12 @@ fn expand_auto_deserialize( do vec::flat_map(in_items) |item| { if item.attrs.any(is_auto_deserialize) { match item.node { - ast::item_ty(@{node: ast::ty_rec(fields), _}, tps) => { + ast::item_ty(@{node: ast::ty_rec(ref fields), _}, tps) => { let deser_impl = mk_rec_deser_impl( cx, item.span, item.ident, - fields, + (*fields), tps ); @@ -206,12 +206,12 @@ fn expand_auto_deserialize( ~[filter_attrs(*item), deser_impl] }, - ast::item_enum(enum_def, tps) => { + ast::item_enum(ref enum_def, tps) => { let deser_impl = mk_enum_deser_impl( cx, item.span, item.ident, - enum_def, + (*enum_def), tps ); diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 79dbbbe0b72..630ba3b8749 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -202,12 +202,12 @@ fn mk_ctxt(parse_sess: parse::parse_sess, fn mod_path() -> ~[ast::ident] { return self.mod_path; } fn bt_push(ei: codemap::ExpnInfo) { match ei { - ExpandedFrom({call_site: cs, callie: callie}) => { + ExpandedFrom({call_site: cs, callie: ref callie}) => { self.backtrace = Some(@ExpandedFrom({ call_site: span {lo: cs.lo, hi: cs.hi, expn_info: self.backtrace}, - callie: callie})); + callie: (*callie)})); } } } diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index 1371cd30308..51db63c819a 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -28,7 +28,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, let var = expr_to_str(cx, args[0], ~"env! requires a string"); match os::getenv(var) { option::None => return mk_uniq_str(cx, sp, ~""), - option::Some(s) => return mk_uniq_str(cx, sp, s) + option::Some(ref s) => return mk_uniq_str(cx, sp, (*s)) } } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index a9fdcc18661..6efca050fa5 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -29,9 +29,9 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, return match e { // expr_mac should really be expr_ext or something; it's the // entry-point for all syntax extensions. - expr_mac(mac) => { + expr_mac(ref mac) => { - match mac.node { + match (*mac).node { // Old-style macros. For compatibility, will erase this whole // block once we've transitioned. 
mac_invoc(pth, args, body) => { @@ -50,7 +50,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, fmt!("%s can only be used as a decorator", *extname)); } Some(normal({expander: exp, span: exp_sp})) => { - let expanded = exp(cx, mac.span, args, body); + let expanded = exp(cx, (*mac).span, args, body); cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); @@ -61,7 +61,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, (fully_expanded, s) } Some(macro_defining(ext)) => { - let named_extension = ext(cx, mac.span, args, body); + let named_extension = ext(cx, (*mac).span, args, body); exts.insert(named_extension.name, named_extension.ext); (ast::expr_rec(~[], None), s) } @@ -79,7 +79,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, // Token-tree macros, these will be the only case when we're // finished transitioning. - mac_invoc_tt(pth, tts) => { + mac_invoc_tt(pth, ref tts) => { assert (vec::len(pth.idents) == 1u); /* using idents and token::special_idents would make the the macro names be hygienic */ @@ -90,7 +90,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, fmt!("macro undefined: '%s'", *extname)) } Some(normal_tt({expander: exp, span: exp_sp})) => { - let expanded = match exp(cx, mac.span, tts) { + let expanded = match exp(cx, (*mac).span, (*tts)) { mr_expr(e) => e, mr_any(expr_maker,_,_) => expr_maker(), _ => cx.span_fatal( @@ -109,8 +109,8 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, Some(normal({expander: exp, span: exp_sp})) => { //convert the new-style invoc for the old-style macro let arg = base::tt_args_to_original_flavor(cx, pth.span, - tts); - let expanded = exp(cx, mac.span, arg, None); + (*tts)); + let expanded = exp(cx, (*mac).span, arg, None); cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); @@ -128,7 +128,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, } } - _ => cx.span_bug(mac.span, ~"naked syntactic bit") + _ => cx.span_bug((*mac).span, ~"naked syntactic bit") } } _ => orig(e, s, fld) @@ -158,9 +158,9 @@ fn expand_mod_items(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, let new_items = do vec::flat_map(module_.items) |item| { do vec::foldr(item.attrs, ~[*item]) |attr, items| { let mname = match attr.node.value.node { - ast::meta_word(n) => n, - ast::meta_name_value(n, _) => n, - ast::meta_list(n, _) => n + ast::meta_word(ref n) => (*n), + ast::meta_name_value(ref n, _) => (*n), + ast::meta_list(ref n, _) => (*n) }; match exts.find(mname) { None | Some(normal(_)) | Some(macro_defining(_)) @@ -227,10 +227,10 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, &&it: @ast::item, fld: ast_fold) -> Option<@ast::item> { let (pth, tts) = biased_match!( - (it.node) ~ (item_mac({node: mac_invoc_tt(pth, tts), _})) else { + (it.node) ~ (item_mac({node: mac_invoc_tt(pth, ref tts), _})) else { cx.span_bug(it.span, ~"invalid item macro invocation") }; - => (pth, tts) + => (pth, (*tts)) ); let extname = cx.parse_sess().interner.get(pth.idents[0]); @@ -238,22 +238,22 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, None => cx.span_fatal(pth.span, fmt!("macro undefined: '%s!'", *extname)), - Some(normal_tt(expand)) => { + Some(normal_tt(ref expand)) => { if it.ident != parse::token::special_idents::invalid { cx.span_fatal(pth.span, fmt!("macro %s! 
expects no ident argument, \ given '%s'", *extname, *cx.parse_sess().interner.get(it.ident))); } - ((expand.expander)(cx, it.span, tts), expand.span) + (((*expand).expander)(cx, it.span, tts), (*expand).span) } - Some(item_tt(expand)) => { + Some(item_tt(ref expand)) => { if it.ident == parse::token::special_idents::invalid { cx.span_fatal(pth.span, fmt!("macro %s! expects an ident argument", *extname)); } - ((expand.expander)(cx, it.span, it.ident, tts), expand.span) + (((*expand).expander)(cx, it.span, it.ident, tts), (*expand).span) } _ => cx.span_fatal( it.span, fmt!("%s! is not legal in item position", *extname)) @@ -268,8 +268,8 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, + *extname), mr_any(_, item_maker, _) => option::chain(item_maker(), |i| {fld.fold_item(i)}), - mr_def(mdef) => { - exts.insert(mdef.name, mdef.ext); + mr_def(ref mdef) => { + exts.insert((*mdef).name, (*mdef).ext); None } }; @@ -283,11 +283,11 @@ fn expand_stmt(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, -> (stmt_, span) { let (mac, pth, tts, semi) = biased_match! ( - (s) ~ (stmt_mac(mac, semi)) else return orig(s, sp, fld); - (mac.node) ~ (mac_invoc_tt(pth, tts)) else { - cx.span_bug(mac.span, ~"naked syntactic bit") + (s) ~ (stmt_mac(ref mac, semi)) else return orig(s, sp, fld); + ((*mac).node) ~ (mac_invoc_tt(pth, ref tts)) else { + cx.span_bug((*mac).span, ~"naked syntactic bit") }; - => (mac, pth, tts, semi)); + => ((*mac), pth, (*tts), semi)); assert(vec::len(pth.idents) == 1u); let extname = cx.parse_sess().interner.get(pth.idents[0]); diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index d6ea6791284..e0d3bd03f42 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -255,8 +255,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, let nargs = args.len(); for pieces.each |pc| { match *pc { - PieceString(s) => { - piece_exprs.push(mk_uniq_str(cx, fmt_sp, s)) + PieceString(ref s) => { + piece_exprs.push(mk_uniq_str(cx, fmt_sp, (*s))) } PieceConv(conv) => { n += 1u; diff --git a/src/libsyntax/ext/pipes/check.rs b/src/libsyntax/ext/pipes/check.rs index cfe4a3d19ac..cd76655fef6 100644 --- a/src/libsyntax/ext/pipes/check.rs +++ b/src/libsyntax/ext/pipes/check.rs @@ -50,18 +50,18 @@ impl ext_ctxt: proto::visitor<(), (), ()> { fn visit_message(name: ~str, _span: span, _tys: &[@ast::Ty], this: state, next: next_state) { match next { - Some({state: next, tys: next_tys}) => { + Some({state: ref next, tys: next_tys}) => { let proto = this.proto; - if !proto.has_state(next) { + if !proto.has_state((*next)) { // This should be a span fatal, but then we need to // track span information. 
self.span_err( - proto.get_state(next).span, + proto.get_state((*next)).span, fmt!("message %s steps to undefined state, %s", - name, next)); + name, (*next))); } else { - let next = proto.get_state(next); + let next = proto.get_state((*next)); if next.ty_params.len() != next_tys.len() { self.span_err( diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 87db2b1cf63..1c4dd197105 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -55,10 +55,10 @@ impl message: gen_send { fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item { debug!("pipec: gen_send"); match self { - message(_id, span, tys, this, - Some({state: next, tys: next_tys})) => { + message(ref _id, span, tys, this, + Some({state: ref next, tys: next_tys})) => { debug!("pipec: next state exists"); - let next = this.proto.get_state(next); + let next = this.proto.get_state((*next)); assert next_tys.len() == next.ty_params.len(); let arg_names = tys.mapi(|i, _ty| cx.ident_of(~"x_"+i.to_str())); @@ -139,7 +139,7 @@ impl message: gen_send { cx.expr_block(body)) } - message(_id, span, tys, this, None) => { + message(ref _id, span, tys, this, None) => { debug!("pipec: no next state"); let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str())); @@ -220,8 +220,8 @@ impl state: to_type_decls { let message(name, span, tys, this, next) = *m; let tys = match next { - Some({state: next, tys: next_tys}) => { - let next = this.proto.get_state(next); + Some({state: ref next, tys: next_tys}) => { + let next = this.proto.get_state((*next)); let next_name = cx.str_of(next.data_name()); let dir = match this.dir { diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index f8c4648dd4a..af75c9e71dc 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -55,7 +55,7 @@ enum message { impl message { fn name() -> ~str { match self { - message(id, _, _, _, _) => id + message(ref id, _, _, _, _) => (*id) } } @@ -113,8 +113,8 @@ impl state { fn reachable(f: fn(state) -> bool) { for self.messages.each |m| { match *m { - message(_, _, _, _, Some({state: id, _})) => { - let state = self.proto.get_state(id); + message(_, _, _, _, Some({state: ref id, _})) => { + let state = self.proto.get_state((*id)); if !f(state) { break } } _ => () diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index 346798c9029..e13dfe750b7 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -69,7 +69,7 @@ impl @ast::expr: qq_helper { fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_expr(self, cx, v);} fn extract_mac() -> Option<ast::mac_> { match (self.node) { - ast::expr_mac({node: mac, _}) => Some(mac), + ast::expr_mac({node: ref mac, _}) => Some((*mac)), _ => None } } @@ -84,7 +84,7 @@ impl @ast::Ty: qq_helper { fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);} fn extract_mac() -> Option<ast::mac_> { match (self.node) { - ast::ty_mac({node: mac, _}) => Some(mac), + ast::ty_mac({node: ref mac, _}) => Some((*mac)), _ => None } } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 27760e4117f..b2e651c7e33 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -104,15 +104,15 @@ fn mk_span(cx: ext_ctxt, qsp: span, sp: span) -> @ast::expr { let e_expn_info = match sp.expn_info { None => build::mk_path(cx, qsp, ids_ext(cx, ~[~"None"])), - Some(@codemap::ExpandedFrom(cr)) => { + Some(@codemap::ExpandedFrom(ref cr)) => { let e_callee = build::mk_rec_e( cx, qsp, ~[{ident: 
id_ext(cx, ~"name"), ex: build::mk_uniq_str(cx, qsp, - cr.callie.name)}, + (*cr).callie.name)}, {ident: id_ext(cx, ~"span"), - ex: mk_option_span(cx, qsp, cr.callie.span)}]); + ex: mk_option_span(cx, qsp, (*cr).callie.span)}]); let e_expn_info_ = build::mk_call( @@ -121,7 +121,7 @@ fn mk_span(cx: ext_ctxt, qsp: span, sp: span) -> @ast::expr { ~[build::mk_rec_e( cx, qsp, ~[{ident: id_ext(cx, ~"call_site"), - ex: mk_span(cx, qsp, cr.call_site)}, + ex: mk_span(cx, qsp, (*cr).call_site)}, {ident: id_ext(cx, ~"callie"), ex: e_callee}])]); @@ -327,20 +327,20 @@ fn mk_token(cx: ext_ctxt, sp: span, tok: token::Token) -> @ast::expr { fn mk_tt(cx: ext_ctxt, sp: span, tt: &ast::token_tree) -> @ast::expr { match *tt { - ast::tt_tok(sp, tok) => { + ast::tt_tok(sp, ref tok) => { let e_tok = build::mk_call(cx, sp, ids_ext(cx, ~[~"tt_tok"]), ~[mk_span(cx, sp, sp), - mk_token(cx, sp, tok)]); + mk_token(cx, sp, (*tok))]); build::mk_uniq_vec_e(cx, sp, ~[e_tok]) } - ast::tt_delim(tts) => { + ast::tt_delim(ref tts) => { let e_delim = build::mk_call(cx, sp, ids_ext(cx, ~[~"tt_delim"]), - ~[mk_tts(cx, sp, tts)]); + ~[mk_tts(cx, sp, (*tts))]); build::mk_uniq_vec_e(cx, sp, ~[e_delim]) } diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index 5e47dee548f..1bf24670aab 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -55,8 +55,8 @@ fn match_error(cx: ext_ctxt, m: matchable, expected: ~str) -> ! { x.span, ~"this argument is an ident, expected " + expected), match_ty(x) => cx.span_fatal( x.span, ~"this argument is a type, expected " + expected), - match_block(x) => cx.span_fatal( - x.span, ~"this argument is a block, expected " + expected), + match_block(ref x) => cx.span_fatal( + (*x).span, ~"this argument is a block, expected " + expected), match_exact => cx.bug(~"what is a match_exact doing in a bindings?") } } @@ -76,10 +76,10 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) -> let mut res = None; for elts.each |elt| { match elt.node { - expr_mac(m) => match m.node { + expr_mac(ref m) => match (*m).node { ast::mac_ellipsis => { if res.is_some() { - cx.span_fatal(m.span, ~"only one ellipsis allowed"); + cx.span_fatal((*m).span, ~"only one ellipsis allowed"); } res = Some({pre: vec::slice(elts, 0u, idx - 1u), @@ -104,7 +104,7 @@ fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) -> for v.each |elem| { match f(*elem) { None => return None, - Some(fv) => res.push(fv) + Some(ref fv) => res.push((*fv)) } } return Some(res); @@ -112,7 +112,7 @@ fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) -> fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result { match ad { - leaf(x) => return f(x), + leaf(ref x) => return f((*x)), seq(ads, span) => match option_flatten_map(|x| a_d_map(x, f), *ads) { None => return None, Some(ts) => return Some(seq(@ts, span)) @@ -124,7 +124,7 @@ fn compose_sels(s1: selector, s2: selector) -> selector { fn scomp(s1: selector, s2: selector, m: matchable) -> match_result { return match s1(m) { None => None, - Some(matches) => a_d_map(matches, s2) + Some(ref matches) => a_d_map((*matches), s2) } } return { |x| scomp(s1, s2, x) }; @@ -172,7 +172,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> Option<bindings> { for b.real_binders.each |key, val| { match val(match_expr(e)) { None => never_mind = true, - Some(mtc) => { res.insert(key, mtc); } + Some(ref mtc) => { res.insert(key, (*mtc)); } } }; //HACK: `ret` doesn't work in `for each` @@ -231,14 +231,14 @@ fn follow_for_trans(cx: 
ext_ctxt, mmaybe: Option<arb_depth<matchable>>, idx_path: @mut ~[uint]) -> Option<matchable> { match mmaybe { None => return None, - Some(m) => { - return match follow(m, *idx_path) { + Some(ref m) => { + return match follow((*m), *idx_path) { seq(_, sp) => { cx.span_fatal(sp, ~"syntax matched under ... but not " + ~"used that way.") } - leaf(m) => return Some(m) + leaf(ref m) => return Some((*m)) } } } @@ -337,7 +337,7 @@ fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], &&i: ident, _fld: ast_fold) -> ident { return match follow_for_trans(cx, b.find(i), idx_path) { Some(match_ident(a_id)) => a_id.node, - Some(m) => match_error(cx, m, ~"an identifier"), + Some(ref m) => match_error(cx, (*m), ~"an identifier"), None => i } } @@ -353,7 +353,7 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], rp: None, types: ~[]} } Some(match_path(a_pth)) => *a_pth, - Some(m) => match_error(cx, m, ~"a path"), + Some(ref m) => match_error(cx, (*m), ~"a path"), None => p } } @@ -380,7 +380,7 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], } Some(match_path(a_pth)) => (expr_path(a_pth), s), Some(match_expr(a_exp)) => (a_exp.node, a_exp.span), - Some(m) => match_error(cx, m, ~"an expression"), + Some(ref m) => match_error(cx, (*m), ~"an expression"), None => orig(e, s, fld) } } @@ -399,7 +399,7 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], Some(id) => { match follow_for_trans(cx, b.find(id), idx_path) { Some(match_ty(ty)) => (ty.node, ty.span), - Some(m) => match_error(cx, m, ~"a type"), + Some(ref m) => match_error(cx, (*m), ~"a type"), None => orig(t, s, fld) } } @@ -422,10 +422,10 @@ fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], return match block_to_ident(blk) { Some(id) => { match follow_for_trans(cx, b.find(id), idx_path) { - Some(match_block(new_blk)) => (new_blk.node, new_blk.span), + Some(match_block(ref new_blk)) => ((*new_blk).node, (*new_blk).span), // possibly allow promotion of ident/path/expr to blocks? 
- Some(m) => match_error(cx, m, ~"a block"), + Some(ref m) => match_error(cx, (*m), ~"a block"), None => orig(blk, s, fld) } } @@ -468,8 +468,8 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) { } } /* FIXME (#2251): handle embedded types and blocks, at least */ - expr_mac(mac) => { - p_t_s_r_mac(cx, mac, s, b); + expr_mac(ref mac) => { + p_t_s_r_mac(cx, (*mac), s, b); } _ => { fn select(cx: ext_ctxt, m: matchable, pat: @expr) -> @@ -548,7 +548,7 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, _s: selector, _b: binders) { fn_m: fn(ast::mac) -> match_result) -> match_result { return match m { match_expr(e) => match e.node { - expr_mac(mac) => fn_m(mac), + expr_mac(ref mac) => fn_m((*mac)), _ => None }, _ => cx.bug(~"broken traversal in p_t_s_r") @@ -659,15 +659,15 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, match elts[0u].node { - expr_mac(mac) => { - match mac.node { + expr_mac(ref mac) => { + match (*mac).node { mac_invoc(pth, invoc_arg, _) => { match path_to_ident(pth) { Some(id) => { let id_str = cx.str_of(id); match macro_name { None => macro_name = Some(id_str), - Some(other_id) => if id_str != other_id { + Some(ref other_id) => if id_str != (*other_id) { cx.span_fatal(pth.span, ~"macro name must be " + ~"consistent"); @@ -679,7 +679,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, } let arg = match invoc_arg { Some(arg) => arg, - None => cx.span_fatal(mac.span, + None => cx.span_fatal((*mac).span, ~"macro must have arguments") }; clauses.push(@{params: pattern_to_selectors(cx, arg), @@ -689,7 +689,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, // the macro arg situation) } _ => { - cx.span_bug(mac.span, ~"undocumented invariant in \ + cx.span_bug((*mac).span, ~"undocumented invariant in \ add_extension"); } } @@ -712,7 +712,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, return {name: match macro_name { - Some(id) => id, + Some(ref id) => (*id), None => cx.span_fatal(sp, ~"macro definition must have " + ~"at least one clause") }, diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 0e1eb2ee2df..aa97646c054 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -83,8 +83,8 @@ fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file))); match res { result::Ok(_) => { /* Continue. 
*/ } - result::Err(e) => { - cx.parse_sess().span_diagnostic.handler().fatal(e); + result::Err(ref e) => { + cx.parse_sess().span_diagnostic.handler().fatal((*e)); } } @@ -104,8 +104,8 @@ fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, }); return mk_base_vec_e(cx, sp, u8_exprs); } - result::Err(e) => { - cx.parse_sess().span_diagnostic.handler().fatal(e) + result::Err(ref e) => { + cx.parse_sess().span_diagnostic.handler().fatal((*e)) } } } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 44a3774ddd0..e51800b8a61 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -117,8 +117,8 @@ type matcher_pos = ~{ }; fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos { - match mpu { - matcher_pos_up(Some(mp)) => copy mp, + match &mpu { + &matcher_pos_up(Some(ref mp)) => copy (*mp), _ => fail } } @@ -127,7 +127,7 @@ fn count_names(ms: &[matcher]) -> uint { vec::foldl(0u, ms, |ct, m| { ct + match m.node { match_tok(_) => 0u, - match_seq(more_ms, _, _, _, _) => count_names(more_ms), + match_seq(ref more_ms, _, _, _, _) => count_names((*more_ms)), match_nonterminal(_,_,_) => 1u }}) } @@ -184,8 +184,8 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match]) ret_val: HashMap<ident, @named_match>) { match m { {node: match_tok(_), span: _} => (), - {node: match_seq(more_ms, _, _, _, _), span: _} => { - for more_ms.each() |next_m| { n_rec(p_s, *next_m, res, ret_val) }; + {node: match_seq(ref more_ms, _, _, _, _), span: _} => { + for (*more_ms).each() |next_m| { n_rec(p_s, *next_m, res, ret_val) }; } {node: match_nonterminal(bind_name, _, idx), span: sp} => { if ret_val.contains_key(bind_name) { @@ -211,8 +211,8 @@ fn parse_or_else(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) -> HashMap<ident, @named_match> { match parse(sess, cfg, rdr, ms) { success(m) => m, - failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str), - error(sp, str) => sess.span_diagnostic.span_fatal(sp, str) + failure(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)), + error(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)) } } @@ -274,8 +274,8 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) // the *_t vars are workarounds for the lack of unary move match copy ei.sep { - Some(t) if idx == len => { // we need a separator - if tok == t { //pass the separator + Some(ref t) if idx == len => { // we need a separator + if tok == (*t) { //pass the separator let ei_t = move ei; ei_t.idx += 1; next_eis.push(move ei_t); @@ -293,7 +293,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) } else { match copy ei.elts[idx].node { /* need to descend into sequence */ - match_seq(matchers, sep, zero_ok, + match_seq(ref matchers, ref sep, zero_ok, match_idx_lo, match_idx_hi) => { if zero_ok { let new_ei = copy ei; @@ -310,7 +310,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) |_m| DVec::<@named_match>()); let ei_t = move ei; cur_eis.push(~{ - elts: matchers, sep: sep, mut idx: 0u, + elts: (*matchers), sep: (*sep), mut idx: 0u, mut up: matcher_pos_up(Some(move ei_t)), matches: move matches, match_lo: match_idx_lo, match_hi: match_idx_hi, @@ -318,9 +318,9 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) }); } match_nonterminal(_,_,_) => { bb_eis.push(move ei) } - match_tok(t) => { + match_tok(ref t) => { let ei_t = move ei; - if t == tok { + if (*t) == tok { 
ei_t.idx += 1; next_eis.push(move ei_t); } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 0767a3cce83..09415703260 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -84,17 +84,17 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, for lhses.eachi() |i, lhs| { // try each arm's matchers match *lhs { - @matched_nonterminal(nt_matchers(mtcs)) => { + @matched_nonterminal(nt_matchers(ref mtcs)) => { // `none` is because we're not interpolating let arg_rdr = new_tt_reader(s_d, itr, None, arg) as reader; - match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs) { + match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) { success(named_matches) => { let rhs = match rhses[i] { // okay, what's your transcriber? - @matched_nonterminal(nt_tt(@tt)) => { - match tt { + @matched_nonterminal(nt_tt(@ref tt)) => { + match (*tt) { // cut off delimiters; don't parse 'em - tt_delim(tts) => tts.slice(1u,tts.len()-1u), + tt_delim(ref tts) => (*tts).slice(1u,(*tts).len()-1u), _ => cx.span_fatal( sp, ~"macro rhs must be delimited") } @@ -113,11 +113,11 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, || p.parse_item(~[/* no attrs*/]), || p.parse_stmt(~[/* no attrs*/])); } - failure(sp, msg) => if sp.lo >= best_fail_spot.lo { + failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo { best_fail_spot = sp; - best_fail_msg = msg; + best_fail_msg = (*msg); }, - error(sp, msg) => cx.span_fatal(sp, msg) + error(sp, ref msg) => cx.span_fatal(sp, (*msg)) } } _ => cx.bug(~"non-matcher found in parsed lhses") diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index eeb8b068b5b..3d901039188 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -130,8 +130,8 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis { } } match t { - tt_delim(tts) | tt_seq(_, tts, _, _) => { - vec::foldl(lis_unconstrained, tts, |lis, tt| + tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => { + vec::foldl(lis_unconstrained, (*tts), |lis, tt| lis_merge(lis, lockstep_iter_size(*tt, r), r)) } tt_tok(*) => lis_unconstrained, @@ -170,8 +170,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { r.cur.idx = 0u; r.repeat_idx[r.repeat_idx.len() - 1u] += 1u; match r.cur.sep { - Some(tk) => { - r.cur_tok = tk; /* repeat same span, I guess */ + Some(ref tk) => { + r.cur_tok = (*tk); /* repeat same span, I guess */ return ret_val; } None => () @@ -181,27 +181,27 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { loop { /* because it's easiest, this handles `tt_delim` not starting with a `tt_tok`, even though it won't happen */ match r.cur.readme[r.cur.idx] { - tt_delim(tts) => { - r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: false, + tt_delim(ref tts) => { + r.cur = @{readme: (*tts), mut idx: 0u, dotdotdoted: false, sep: None, up: tt_frame_up(option::Some(r.cur)) }; // if this could be 0-length, we'd need to potentially recur here } - tt_tok(sp, tok) => { - r.cur_span = sp; r.cur_tok = tok; + tt_tok(sp, ref tok) => { + r.cur_span = sp; r.cur_tok = (*tok); r.cur.idx += 1u; return ret_val; } - tt_seq(sp, tts, sep, zerok) => { - match lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) { + tt_seq(sp, ref tts, ref sep, zerok) => { + match lockstep_iter_size(tt_seq(sp, (*tts), (*sep), zerok), r) { lis_unconstrained => { r.sp_diag.span_fatal( sp, /* blame macro writer */ ~"attempted to repeat an expression containing no syntax \ variables matched as 
repeating at this depth"); } - lis_contradiction(msg) => { /* FIXME #2887 blame macro invoker + lis_contradiction(ref msg) => { /* FIXME #2887 blame macro invoker instead*/ - r.sp_diag.span_fatal(sp, msg); + r.sp_diag.span_fatal(sp, (*msg)); } lis_constraint(len, _) => { if len == 0 { @@ -217,8 +217,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { } else { r.repeat_len.push(len); r.repeat_idx.push(0u); - r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: true, - sep: sep, up: tt_frame_up(option::Some(r.cur))}; + r.cur = @{readme: (*tts), mut idx: 0u, dotdotdoted: true, + sep: (*sep), up: tt_frame_up(option::Some(r.cur))}; } } } @@ -234,8 +234,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { r.cur.idx += 1u; return ret_val; } - matched_nonterminal(other_whole_nt) => { - r.cur_span = sp; r.cur_tok = INTERPOLATED(other_whole_nt); + matched_nonterminal(ref other_whole_nt) => { + r.cur_span = sp; r.cur_tok = INTERPOLATED((*other_whole_nt)); r.cur.idx += 1u; return ret_val; } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 15435f48421..39da8531da8 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -89,14 +89,14 @@ type ast_fold_precursor = @{ fn fold_meta_item_(&&mi: @meta_item, fld: ast_fold) -> @meta_item { return @{node: match mi.node { - meta_word(id) => meta_word(id), - meta_list(id, mis) => { + meta_word(ref id) => meta_word((*id)), + meta_list(ref id, mis) => { let fold_meta_item = |x|fold_meta_item_(x, fld); - meta_list(/* FIXME: (#2543) */ copy id, + meta_list(/* FIXME: (#2543) */ copy (*id), vec::map(mis, |e| fold_meta_item(*e))) } - meta_name_value(id, s) => { - meta_name_value(id, /* FIXME (#2543) */ copy s) + meta_name_value(ref id, s) => { + meta_name_value((*id), /* FIXME (#2543) */ copy s) } }, span: fld.new_span(mi.span)}; @@ -216,21 +216,21 @@ fn noop_fold_struct_field(&&sf: @struct_field, fld: ast_fold) fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ { return match i { item_const(t, e) => item_const(fld.fold_ty(t), fld.fold_expr(e)), - item_fn(decl, purity, typms, body) => { + item_fn(decl, purity, typms, ref body) => { item_fn(fold_fn_decl(decl, fld), purity, fold_ty_params(typms, fld), - fld.fold_block(body)) + fld.fold_block((*body))) } item_mod(m) => item_mod(fld.fold_mod(m)), item_foreign_mod(nm) => item_foreign_mod(fld.fold_foreign_mod(nm)), item_ty(t, typms) => item_ty(fld.fold_ty(t), fold_ty_params(typms, fld)), - item_enum(enum_definition, typms) => { + item_enum(ref enum_definition, typms) => { item_enum(ast::enum_def({ - variants: vec::map(enum_definition.variants, + variants: vec::map((*enum_definition).variants, |x| fld.fold_variant(*x)), - common: option::map(&enum_definition.common, + common: option::map(&(*enum_definition).common, |x| fold_struct_def(*x, fld)) }), fold_ty_params(typms, fld)) } @@ -244,8 +244,8 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ { fld.fold_ty(ty), vec::map(*methods, |x| fld.fold_method(*x))) } - item_trait(tps, traits, methods) => { - let methods = do methods.map |method| { + item_trait(tps, traits, ref methods) => { + let methods = do (*methods).map |method| { match *method { required(*) => copy *method, provided(method) => provided(fld.fold_method(method)) @@ -255,9 +255,9 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ { vec::map(traits, |p| fold_trait_ref(*p, fld)), move methods) } - item_mac(m) => { + item_mac(ref m) => { // FIXME #2888: we might actually want to do something here. 
- item_mac(m) + item_mac((*m)) } }; } @@ -320,7 +320,7 @@ fn noop_fold_stmt(s: stmt_, fld: ast_fold) -> stmt_ { stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)), stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)), stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid)), - stmt_mac(mac, semi) => stmt_mac(fold_mac(mac), semi) + stmt_mac(ref mac, semi) => stmt_mac(fold_mac((*mac)), semi) }; } @@ -409,8 +409,8 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ { } expr_repeat(expr, count, mutt) => expr_repeat(fld.fold_expr(expr), fld.fold_expr(count), mutt), - expr_rec(fields, maybe_expr) => { - expr_rec(vec::map(fields, |x| fold_field(*x)), + expr_rec(ref fields, maybe_expr) => { + expr_rec(vec::map((*fields), |x| fold_field(*x)), option::map(&maybe_expr, |x| fld.fold_expr(*x))) } expr_tup(elts) => expr_tup(vec::map(elts, |x| fld.fold_expr(*x))), @@ -435,35 +435,35 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ { expr_lit(_) => copy e, expr_cast(expr, ty) => expr_cast(fld.fold_expr(expr), ty), expr_addr_of(m, ohs) => expr_addr_of(m, fld.fold_expr(ohs)), - expr_if(cond, tr, fl) => { - expr_if(fld.fold_expr(cond), fld.fold_block(tr), + expr_if(cond, ref tr, fl) => { + expr_if(fld.fold_expr(cond), fld.fold_block((*tr)), option::map(&fl, |x| fld.fold_expr(*x))) } - expr_while(cond, body) => { - expr_while(fld.fold_expr(cond), fld.fold_block(body)) + expr_while(cond, ref body) => { + expr_while(fld.fold_expr(cond), fld.fold_block((*body))) } - expr_loop(body, opt_ident) => { - expr_loop(fld.fold_block(body), + expr_loop(ref body, opt_ident) => { + expr_loop(fld.fold_block((*body)), option::map(&opt_ident, |x| fld.fold_ident(*x))) } - expr_match(expr, arms) => { + expr_match(expr, ref arms) => { expr_match(fld.fold_expr(expr), - vec::map(arms, |x| fld.fold_arm(*x))) + vec::map((*arms), |x| fld.fold_arm(*x))) } - expr_fn(proto, decl, body, captures) => { + expr_fn(proto, decl, ref body, captures) => { expr_fn(proto, fold_fn_decl(decl, fld), - fld.fold_block(body), + fld.fold_block((*body)), @((*captures).map(|cap_item| { @({id: fld.new_id(cap_item.id), ..**cap_item})}))) } - expr_fn_block(decl, body, captures) => { - expr_fn_block(fold_fn_decl(decl, fld), fld.fold_block(body), + expr_fn_block(decl, ref body, captures) => { + expr_fn_block(fold_fn_decl(decl, fld), fld.fold_block((*body)), @((*captures).map(|cap_item| { @({id: fld.new_id(cap_item.id), ..**cap_item})}))) } - expr_block(blk) => expr_block(fld.fold_block(blk)), + expr_block(ref blk) => expr_block(fld.fold_block((*blk))), expr_copy(e) => expr_copy(fld.fold_expr(e)), expr_unary_move(e) => expr_unary_move(fld.fold_expr(e)), expr_assign(el, er) => { @@ -492,10 +492,10 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ { expr_log(i, lv, e) => expr_log(i, fld.fold_expr(lv), fld.fold_expr(e)), expr_assert(e) => expr_assert(fld.fold_expr(e)), - expr_mac(mac) => expr_mac(fold_mac(mac)), - expr_struct(path, fields, maybe_expr) => { + expr_mac(ref mac) => expr_mac(fold_mac((*mac))), + expr_struct(path, ref fields, maybe_expr) => { expr_struct(fld.fold_path(path), - vec::map(fields, |x| fold_field(*x)), + vec::map((*fields), |x| fold_field(*x)), option::map(&maybe_expr, |x| fld.fold_expr(*x))) }, expr_paren(ex) => expr_paren(fld.fold_expr(ex)) @@ -519,7 +519,7 @@ fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ { ty_vec(mt) => ty_vec(fold_mt(mt, fld)), ty_ptr(mt) => ty_ptr(fold_mt(mt, fld)), ty_rptr(region, mt) => ty_rptr(region, fold_mt(mt, fld)), - ty_rec(fields) => ty_rec(vec::map(fields, |f| 
fold_field(*f, fld))), + ty_rec(ref fields) => ty_rec(vec::map((*fields), |f| fold_field(*f, fld))), ty_fn(f) => ty_fn(@TyFn { proto: f.proto, @@ -533,7 +533,7 @@ fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ { ty_path(path, id) => ty_path(fld.fold_path(path), fld.new_id(id)), ty_fixed_length_vec(mt, vs) => ty_fixed_length_vec(fold_mt(mt, fld), vs), - ty_mac(mac) => ty_mac(fold_mac(mac)) + ty_mac(ref mac) => ty_mac(fold_mac((*mac))) } } @@ -579,10 +579,10 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ { }) } - enum_variant_kind(enum_definition) => { - let variants = vec::map(enum_definition.variants, + enum_variant_kind(ref enum_definition) => { + let variants = vec::map((*enum_definition).variants, |x| fld.fold_variant(*x)); - let common = option::map(&enum_definition.common, + let common = option::map(&(*enum_definition).common, |x| fold_struct_def(*x, fld)); kind = enum_variant_kind(ast::enum_def({ variants: variants, common: common })); diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 4c14f05d56b..a48e33c9405 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -199,9 +199,9 @@ impl Parser { while self.token != token::GT && self.token != token::BINOP(token::SHR) { match sep { - Some(t) => { + Some(ref t) => { if first { first = false; } - else { self.expect(t); } + else { self.expect((*t)); } } _ => () } @@ -243,9 +243,9 @@ impl Parser { let mut v: ~[T] = ~[]; while self.token != ket { match sep.sep { - Some(t) => { + Some(ref t) => { if first { first = false; } - else { self.expect(t); } + else { self.expect((*t)); } } _ => () } diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index a0cfece6b10..3763a74b9d2 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -406,9 +406,9 @@ fn scan_number(c: char, rdr: string_reader) -> token::Token { num_str += ~"." + dec_part; } match scan_exponent(rdr) { - Some(s) => { + Some(ref s) => { is_float = true; - num_str += s; + num_str += (*s); } None => () } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 7cf279d0d81..bebced8f38a 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -136,7 +136,7 @@ macro_rules! maybe_whole_expr ( macro_rules! maybe_whole ( ($p:expr, $constructor:ident) => ( match copy $p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; } + INTERPOLATED(token::$constructor(ref x)) => { $p.bump(); return (*x); } _ => () }) ; (deref $p:expr, $constructor:ident) => ( match copy $p.token { @@ -155,7 +155,7 @@ macro_rules! 
maybe_whole ( _ => () }) ; (pair_empty $p:expr, $constructor:ident) => ( match copy $p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return (~[], x); } + INTERPOLATED(token::$constructor(ref x)) => { $p.bump(); return (~[], (*x)); } _ => () }) @@ -166,7 +166,7 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>) -> ~[attribute] { match rhs { None => lhs, - Some(attrs) => vec::append(lhs, attrs) + Some(ref attrs) => vec::append(lhs, (*attrs)) } } @@ -510,9 +510,9 @@ impl Parser { let lo = self.span.lo; match self.maybe_parse_dollar_mac() { - Some(e) => { + Some(ref e) => { return @{id: self.get_id(), - node: ty_mac(spanned(lo, self.span.hi, e)), + node: ty_mac(spanned(lo, self.span.hi, (*e))), span: mk_sp(lo, self.span.hi)}; } None => () @@ -928,7 +928,7 @@ impl Parser { let mut ex: expr_; match self.maybe_parse_dollar_mac() { - Some(x) => return self.mk_mac_expr(lo, self.span.hi, x), + Some(ref x) => return self.mk_mac_expr(lo, self.span.hi, (*x)), _ => () } @@ -2022,7 +2022,7 @@ impl Parser { pat = pat_tup(fields); } } - tok => { + copy tok => { if !is_ident_or_path(tok) || self.is_keyword(~"true") || self.is_keyword(~"false") @@ -2284,7 +2284,7 @@ impl Parser { let mut item_attrs; match self.parse_outer_attrs_or_ext(first_item_attrs) { None => item_attrs = ~[], - Some(Left(attrs)) => item_attrs = attrs, + Some(Left(ref attrs)) => item_attrs = (*attrs), Some(Right(ext)) => { return @spanned(lo, ext.span.hi, stmt_expr(ext, self.get_id())); @@ -2346,8 +2346,8 @@ impl Parser { let lo = self.span.lo; let us = self.eat_keyword(~"unsafe"); self.expect(token::LBRACE); - let {inner, next} = maybe_parse_inner_attrs_and_next(self, - parse_attrs); + let {inner: move inner, next: move next} = + maybe_parse_inner_attrs_and_next(self, parse_attrs); let blk_check_mode = if us { unsafe_blk } else { default_blk }; return (inner, self.parse_block_tail_(lo, blk_check_mode, next)); } @@ -2372,7 +2372,9 @@ impl Parser { let mut stmts = ~[]; let mut expr = None; - let {attrs_remaining, view_items, items: items, _} = + let {attrs_remaining: move attrs_remaining, + view_items: move view_items, + items: items, _} = self.parse_items_and_view_items(first_item_attrs, IMPORTS_AND_ITEMS_ALLOWED, false); @@ -2408,7 +2410,7 @@ impl Parser { token::RBRACE => { expr = Some(e); } - t => { + copy t => { if classify::stmt_ends_with_semi(*stmt) { self.fatal( ~"expected `;` or `}` after \ @@ -2421,12 +2423,12 @@ impl Parser { } } - stmt_mac(m, _) => { + stmt_mac(ref m, _) => { // Statement macro; might be an expr match self.token { token::SEMI => { self.bump(); - stmts.push(@{node: stmt_mac(m, true), + stmts.push(@{node: stmt_mac((*m), true), ..*stmt}); } token::RBRACE => { @@ -2435,7 +2437,7 @@ impl Parser { expr = Some( self.mk_mac_expr(stmt.span.lo, stmt.span.hi, - m.node)); + (*m).node)); } _ => { stmts.push(stmt); } } @@ -2847,7 +2849,7 @@ impl Parser { fields = ~[]; while self.token != token::RBRACE { match self.parse_class_item() { - dtor_decl(blk, attrs, s) => { + dtor_decl(ref blk, ref attrs, s) => { match the_dtor { Some((_, _, s_first)) => { self.span_note(s, fmt!("Duplicate destructor \ @@ -2857,7 +2859,7 @@ impl Parser { declared here"); } None => { - the_dtor = Some((blk, attrs, s)); + the_dtor = Some(((*blk), (*attrs), s)); } } } @@ -3007,7 +3009,9 @@ impl Parser { fn parse_mod_items(term: token::Token, +first_item_attrs: ~[attribute]) -> _mod { // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items, items: starting_items, _} = 
+ let {attrs_remaining: move attrs_remaining, + view_items: move view_items, + items: starting_items, _} = self.parse_items_and_view_items(first_item_attrs, VIEW_ITEMS_AND_ITEMS_ALLOWED, true); @@ -3076,11 +3080,11 @@ impl Parser { // on the mod, then we'll go and suck in another file and merge // its contents match ::attr::first_attr_value_str_by_name(outer_attrs, ~"merge") { - Some(path) => { + Some(ref path) => { let prefix = Path( self.sess.cm.span_to_filename(copy self.span)); let prefix = prefix.dir_path(); - let path = Path(path); + let path = Path((*path)); let (new_mod_item, new_attrs) = self.eval_src_mod_from_path( prefix, path, ~[], id_span); @@ -3113,7 +3117,7 @@ impl Parser { let file_path = match ::attr::first_attr_value_str_by_name( outer_attrs, ~"path") { - Some(d) => d, + Some(ref d) => (*d), None => default_path }; @@ -3143,7 +3147,7 @@ impl Parser { fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str { match ::attr::first_attr_value_str_by_name(attrs, ~"path") { - Some(d) => d, + Some(ref d) => (*d), None => default } } @@ -3208,7 +3212,10 @@ impl Parser { +first_item_attrs: ~[attribute]) -> foreign_mod { // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items, items: _, foreign_items} = + let {attrs_remaining: move attrs_remaining, + view_items: move view_items, + items: _, + foreign_items: move foreign_items} = self.parse_items_and_view_items(first_item_attrs, VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED, true); @@ -3341,7 +3348,7 @@ impl Parser { let mut methods: ~[@method] = ~[]; while self.token != token::RBRACE { match self.parse_class_item() { - dtor_decl(blk, attrs, s) => { + dtor_decl(ref blk, ref attrs, s) => { match the_dtor { Some((_, _, s_first)) => { self.span_note(s, ~"duplicate destructor \ @@ -3351,7 +3358,7 @@ impl Parser { declared here"); } None => { - the_dtor = Some((blk, attrs, s)); + the_dtor = Some(((*blk), (*attrs), s)); } } } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 684c8414a01..a0aecd0375e 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -190,9 +190,9 @@ fn to_str(in: @ident_interner, t: Token) -> ~str { /* Other */ DOC_COMMENT(s) => *in.get(s), EOF => ~"<eof>", - INTERPOLATED(nt) => { + INTERPOLATED(ref nt) => { ~"an interpolated " + - match nt { + match (*nt) { nt_item(*) => ~"item", nt_block(*) => ~"block", nt_stmt(*) => ~"statement", diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index df0c1214361..547f0141d33 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -387,7 +387,7 @@ fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) { print_region(s, ~"&", region, ~"/"); print_mt(s, mt); } - ast::ty_rec(fields) => { + ast::ty_rec(ref fields) => { word(s.s, ~"{"); fn print_field(s: ps, f: ast::ty_field) { cbox(s, indent_unit); @@ -398,7 +398,7 @@ fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) { end(s); } fn get_span(f: ast::ty_field) -> codemap::span { return f.span; } - commasep_cmnt(s, consistent, fields, print_field, get_span); + commasep_cmnt(s, consistent, (*fields), print_field, get_span); word(s.s, ~",}"); } ast::ty_tup(elts) => { @@ -479,11 +479,11 @@ fn print_item(s: ps, &&item: @ast::item) { end(s); // end the outer cbox } - ast::item_fn(decl, purity, typarams, body) => { + ast::item_fn(decl, purity, typarams, ref body) => { print_fn(s, decl, Some(purity), item.ident, typarams, None, item.vis); word(s.s, ~" "); - 
print_block_with_attrs(s, body, item.attrs); + print_block_with_attrs(s, (*body), item.attrs); } ast::item_mod(_mod) => { head(s, visibility_qualified(item.vis, ~"mod")); @@ -522,8 +522,8 @@ fn print_item(s: ps, &&item: @ast::item) { word(s.s, ~";"); end(s); // end the outer ibox } - ast::item_enum(enum_definition, params) => { - print_enum_def(s, enum_definition, params, item.ident, + ast::item_enum(ref enum_definition, params) => { + print_enum_def(s, (*enum_definition), params, item.ident, item.span, item.vis); } ast::item_class(struct_def, tps) => { @@ -558,7 +558,7 @@ fn print_item(s: ps, &&item: @ast::item) { bclose(s, item.span); } } - ast::item_trait(tps, traits, methods) => { + ast::item_trait(tps, traits, ref methods) => { head(s, visibility_qualified(item.vis, ~"trait")); print_ident(s, item.ident); print_type_params(s, tps); @@ -569,19 +569,19 @@ fn print_item(s: ps, &&item: @ast::item) { } word(s.s, ~" "); bopen(s); - for methods.each |meth| { + for (*methods).each |meth| { print_trait_method(s, *meth); } bclose(s, item.span); } - ast::item_mac({node: ast::mac_invoc_tt(pth, tts), _}) => { + ast::item_mac({node: ast::mac_invoc_tt(pth, ref tts), _}) => { print_visibility(s, item.vis); print_path(s, pth, false); word(s.s, ~"! "); print_ident(s, item.ident); cbox(s, indent_unit); popen(s); - for tts.each |tt| { + for (*tts).each |tt| { print_tt(s, *tt); } pclose(s); @@ -744,23 +744,23 @@ fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param], /// expression arguments as expressions). It can be done! I think. fn print_tt(s: ps, tt: ast::token_tree) { match tt { - ast::tt_delim(tts) => for tts.each() |tt_elt| { print_tt(s, *tt_elt); }, - ast::tt_tok(_, tk) => { - match tk { + ast::tt_delim(ref tts) => for (*tts).each() |tt_elt| { print_tt(s, *tt_elt); }, + ast::tt_tok(_, ref tk) => { + match (*tk) { parse::token::IDENT(*) => { // don't let idents run together if s.s.token_tree_last_was_ident { word(s.s, ~" ") } s.s.token_tree_last_was_ident = true; } _ => { s.s.token_tree_last_was_ident = false; } } - word(s.s, parse::token::to_str(s.intr, tk)); + word(s.s, parse::token::to_str(s.intr, (*tk))); } - ast::tt_seq(_, tts, sep, zerok) => { + ast::tt_seq(_, ref tts, ref sep, zerok) => { word(s.s, ~"$("); - for tts.each() |tt_elt| { print_tt(s, *tt_elt); } + for (*tts).each() |tt_elt| { print_tt(s, *tt_elt); } word(s.s, ~")"); - match sep { - Some(tk) => word(s.s, parse::token::to_str(s.intr, tk)), + match (*sep) { + Some(ref tk) => word(s.s, parse::token::to_str(s.intr, (*tk))), None => () } word(s.s, if zerok { ~"*" } else { ~"+" }); @@ -792,8 +792,8 @@ fn print_variant(s: ps, v: ast::variant) { head(s, ~""); print_struct(s, struct_def, ~[], v.node.name, v.span); } - ast::enum_variant_kind(enum_definition) => { - print_variants(s, enum_definition.variants, v.span); + ast::enum_variant_kind(ref enum_definition) => { + print_variants(s, (*enum_definition).variants, v.span); } } match v.node.disr_expr { @@ -818,7 +818,7 @@ fn print_ty_method(s: ps, m: ast::ty_method) { fn print_trait_method(s: ps, m: ast::trait_method) { match m { - required(ty_m) => print_ty_method(s, ty_m), + required(ref ty_m) => print_ty_method(s, (*ty_m)), provided(m) => print_method(s, m) } } @@ -892,9 +892,9 @@ fn print_stmt(s: ps, st: ast::stmt) { print_expr(s, expr); word(s.s, ~";"); } - ast::stmt_mac(mac, semi) => { + ast::stmt_mac(ref mac, semi) => { space_if_not_bol(s); - print_mac(s, mac); + print_mac(s, (*mac)); if semi { word(s.s, ~";"); } } } @@ -974,21 +974,21 @@ fn print_if(s: ps, test: 
@@ -974,21 +974,21 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
    Some(_else) => {
      match _else.node {
        // "another else-if"
-        ast::expr_if(i, t, e) => {
+        ast::expr_if(i, ref t, e) => {
          cbox(s, indent_unit - 1u);
          ibox(s, 0u);
          word(s.s, ~" else if ");
          print_expr(s, i);
          space(s.s);
-          print_block(s, t);
+          print_block(s, (*t));
          do_else(s, e);
        }
        // "final else"
-        ast::expr_block(b) => {
+        ast::expr_block(ref b) => {
          cbox(s, indent_unit - 1u);
          ibox(s, 0u);
          word(s.s, ~" else ");
-          print_block(s, b);
+          print_block(s, (*b));
        }
        // BLEAH, constraints would be great here
        _ => {
@@ -1014,11 +1014,11 @@ fn print_mac(s: ps, m: ast::mac) {
      arg.iter(|a| print_expr(s, *a));
      // FIXME: extension 'body' (#2339)
    }
-    ast::mac_invoc_tt(pth, tts) => {
+    ast::mac_invoc_tt(pth, ref tts) => {
      print_path(s, pth, false);
      word(s.s, ~"!");
      popen(s);
-      for tts.each() |tt| { print_tt(s, *tt); }
+      for (*tts).each() |tt| { print_tt(s, *tt); }
      pclose(s);
    }
    ast::mac_ellipsis => word(s.s, ~"..."),
@@ -1149,9 +1149,9 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
      end(s);
    }
-    ast::expr_rec(fields, wth) => {
      word(s.s, ~"{");
-      commasep_cmnt(s, consistent, fields, print_field, get_span);
+    ast::expr_rec(ref fields, wth) => {
      word(s.s, ~"{");
+      commasep_cmnt(s, consistent, (*fields), print_field, get_span);
      match wth {
        Some(expr) => {
          ibox(s, indent_unit);
@@ -1165,13 +1165,13 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
      }
      word(s.s, ~"}");
    }
-    ast::expr_struct(path, fields, wth) => {
      print_path(s, path, true);
      word(s.s, ~"{");
-      commasep_cmnt(s, consistent, fields, print_field, get_span);
+    ast::expr_struct(path, ref fields, wth) => {
      print_path(s, path, true);
      word(s.s, ~"{");
+      commasep_cmnt(s, consistent, (*fields), print_field, get_span);
      match wth {
        Some(expr) => {
-          if vec::len(fields) > 0u { space(s.s); }
+          if vec::len((*fields)) > 0u { space(s.s); }
          ibox(s, indent_unit);
          word(s.s, ~",");
          space(s.s);
@@ -1229,33 +1229,33 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
      word_space(s, ~"as");
      print_type_ex(s, ty, true);
    }
-    ast::expr_if(test, blk, elseopt) => {
-      print_if(s, test, blk, elseopt, false);
+    ast::expr_if(test, ref blk, elseopt) => {
+      print_if(s, test, (*blk), elseopt, false);
    }
-    ast::expr_while(test, blk) => {
+    ast::expr_while(test, ref blk) => {
      head(s, ~"while");
      print_expr(s, test);
      space(s.s);
-      print_block(s, blk);
+      print_block(s, (*blk));
    }
-    ast::expr_loop(blk, opt_ident) => {
+    ast::expr_loop(ref blk, opt_ident) => {
      head(s, ~"loop");
      space(s.s);
      opt_ident.iter(|ident| {
        print_ident(s, *ident);
        word_space(s, ~":");
      });
-      print_block(s, blk);
+      print_block(s, (*blk));
    }
-    ast::expr_match(expr, arms) => {
+    ast::expr_match(expr, ref arms) => {
      cbox(s, alt_indent_unit);
      ibox(s, 4);
      word_nbsp(s, ~"match");
      print_expr(s, expr);
      space(s.s);
      bopen(s);
-      let len = arms.len();
-      for arms.eachi |i, arm| {
+      let len = (*arms).len();
+      for (*arms).eachi |i, arm| {
        space(s.s);
        cbox(s, alt_indent_unit);
        ibox(s, 0u);
@@ -1287,10 +1287,10 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
        match arm.body.node.expr {
          Some(expr) => {
            match expr.node {
-              ast::expr_block(blk) => {
+              ast::expr_block(ref blk) => {
                // the block will close the pattern's ibox
                print_block_unclosed_indent(
-                  s, blk, alt_indent_unit);
+                  s, (*blk), alt_indent_unit);
              }
              _ => {
                end(s); // close the ibox for the pattern
@@ -1312,7 +1312,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
      }
      bclose_(s, expr.span, alt_indent_unit);
    }
-    ast::expr_fn(proto, decl, body, cap_clause) => {
+    ast::expr_fn(proto, decl, ref body, cap_clause) => {
      // containing cbox, will be closed by print-block at }
      cbox(s, indent_unit);
      // head-box, will be closed by print-block at start
@@ -1321,9 +1321,9 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
                Some(proto), ast::inherited);
      print_fn_args_and_ret(s, decl, *cap_clause, None);
      space(s.s);
-      print_block(s, body);
+      print_block(s, (*body));
    }
-    ast::expr_fn_block(decl, body, cap_clause) => {
+    ast::expr_fn_block(decl, ref body, cap_clause) => {
      // in do/for blocks we don't want to show an empty
      // argument list, but at this point we don't know which
      // we are inside.
@@ -1332,16 +1332,16 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
      print_fn_block_args(s, decl, *cap_clause);
      space(s.s); // }
-      assert body.node.stmts.is_empty();
-      assert body.node.expr.is_some();
+      assert (*body).node.stmts.is_empty();
+      assert (*body).node.expr.is_some();
      // we extract the block, so as not to create another set of boxes
-      match body.node.expr.get().node {
-        ast::expr_block(blk) => {
-          print_block_unclosed(s, blk);
+      match (*body).node.expr.get().node {
+        ast::expr_block(ref blk) => {
+          print_block_unclosed(s, (*blk));
        }
        _ => {
          // this is a bare expression
-          print_expr(s, body.node.expr.get());
+          print_expr(s, (*body).node.expr.get());
          end(s); // need to close a box
        }
      }
@@ -1356,12 +1356,12 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
    ast::expr_do_body(body) => {
      print_expr(s, body);
    }
-    ast::expr_block(blk) => {
+    ast::expr_block(ref blk) => {
      // containing cbox, will be closed by print-block at }
      cbox(s, indent_unit);
      // head-box, will be closed by print-block after {
      ibox(s, 0u);
-      print_block(s, blk);
+      print_block(s, (*blk));
    }
    ast::expr_copy(e) => { word_space(s, ~"copy"); print_expr(s, e); }
    ast::expr_unary_move(e) => {
@@ -1447,7 +1447,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
      word_nbsp(s, ~"assert");
      print_expr(s, expr);
    }
-    ast::expr_mac(m) => print_mac(s, m),
+    ast::expr_mac(ref m) => print_mac(s, (*m)),
    ast::expr_paren(e) => {
      popen(s);
      print_expr(s, e);
@@ -1768,14 +1768,14 @@ fn print_type_params(s: ps, &&params: ~[ast::ty_param]) {
 fn print_meta_item(s: ps, &&item: @ast::meta_item) {
   ibox(s, indent_unit);
   match item.node {
-    ast::meta_word(name) => word(s.s, name),
-    ast::meta_name_value(name, value) => {
-      word_space(s, name);
+    ast::meta_word(ref name) => word(s.s, (*name)),
+    ast::meta_name_value(ref name, value) => {
+      word_space(s, (*name));
      word_space(s, ~"=");
      print_literal(s, @value);
    }
-    ast::meta_list(name, items) => {
-      word(s.s, name);
+    ast::meta_list(ref name, items) => {
+      word(s.s, (*name));
      popen(s);
      commasep(s, consistent, items, print_meta_item);
      pclose(s);
@@ -1803,10 +1803,10 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) {
      word(s.s, ~"::*");
    }
-    ast::view_path_list(path, idents, _) => {
+    ast::view_path_list(path, ref idents, _) => {
      print_path(s, path, false);
      word(s.s, ~"::{");
-      do commasep(s, inconsistent, idents) |s, w| {
+      do commasep(s, inconsistent, (*idents)) |s, w| {
        print_ident(s, w.node.name);
      }
      word(s.s, ~"}");
@@ -1948,15 +1948,15 @@ fn maybe_print_trailing_comment(s: ps, span: codemap::span,
   let mut cm;
   match s.cm { Some(ccm) => cm = ccm, _ => return }
   match next_comment(s) {
-    Some(cmnt) => {
-      if cmnt.style != comments::trailing { return; }
+    Some(ref cmnt) => {
+      if (*cmnt).style != comments::trailing { return; }
      let span_line = cm.lookup_char_pos(span.hi);
-      let comment_line = cm.lookup_char_pos(cmnt.pos);
-      let mut next = cmnt.pos + BytePos(1u);
+      let comment_line = cm.lookup_char_pos((*cmnt).pos);
+      let mut next = (*cmnt).pos + BytePos(1u);
      match next_pos { None => (), Some(p) => next = p }
-      if span.hi < cmnt.pos && cmnt.pos < next &&
+      if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
          span_line.line == comment_line.line {
-        print_comment(s, cmnt);
+        print_comment(s, (*cmnt));
        s.cur_cmnt += 1u;
      }
    }
@@ -1970,7 +1970,7 @@ fn print_remaining_comments(s: ps) {
   if next_comment(s).is_none() { hardbreak(s.s); }
   loop {
     match next_comment(s) {
-      Some(cmnt) => { print_comment(s, cmnt); s.cur_cmnt += 1u; }
+      Some(ref cmnt) => { print_comment(s, (*cmnt)); s.cur_cmnt += 1u; }
      _ => break
    }
  }
@@ -1979,8 +1979,8 @@ fn print_remaining_comments(s: ps) {
 fn print_literal(s: ps, &&lit: @ast::lit) {
   maybe_print_comment(s, lit.span.lo);
   match next_lit(s, lit.span.lo) {
-    Some(ltrl) => {
-      word(s.s, ltrl.lit);
+    Some(ref ltrl) => {
+      word(s.s, (*ltrl).lit);
      return;
    }
    _ => ()
@@ -2030,9 +2030,9 @@ fn lit_to_str(l: @ast::lit) -> ~str {
 fn next_lit(s: ps, pos: BytePos) -> Option<comments::lit> {
   match s.literals {
-    Some(lits) => {
-      while s.cur_lit < vec::len(lits) {
-        let ltrl = lits[s.cur_lit];
+    Some(ref lits) => {
+      while s.cur_lit < vec::len((*lits)) {
+        let ltrl = (*lits)[s.cur_lit];
        if ltrl.pos > pos { return None; }
        s.cur_lit += 1u;
        if ltrl.pos == pos { return Some(ltrl); }
@@ -2046,9 +2046,9 @@ fn next_lit(s: ps, pos: BytePos) -> Option<comments::lit> {
 fn maybe_print_comment(s: ps, pos: BytePos) {
   loop {
     match next_comment(s) {
-      Some(cmnt) => {
-        if cmnt.pos < pos {
-          print_comment(s, cmnt);
+      Some(ref cmnt) => {
+        if (*cmnt).pos < pos {
+          print_comment(s, (*cmnt));
          s.cur_cmnt += 1u;
        } else { break; }
      }
@@ -2117,9 +2117,9 @@ fn to_str<T>(t: T, f: fn@(ps, T), intr: @ident_interner) -> ~str {
 fn next_comment(s: ps) -> Option<comments::cmnt> {
   match s.comments {
-    Some(cmnts) => {
-      if s.cur_cmnt < vec::len(cmnts) {
-        return Some(cmnts[s.cur_cmnt]);
+    Some(ref cmnts) => {
+      if s.cur_cmnt < vec::len((*cmnts)) {
+        return Some((*cmnts)[s.cur_cmnt]);
      } else { return None::<comments::cmnt>; }
    }
    _ => return None::<comments::cmnt>
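
The pprust.rs hunks above are one mechanical rewrite repeated: every match arm that used to bind a field by value now binds it with `ref` and dereferences it at each use, presumably to satisfy the `deprecated_pattern` lint that this same patch enables in syntax.rc below. A minimal sketch of that binding style in present-day Rust syntax follows; the `Item` enum and `print_methods` helper are invented for illustration and are not the libsyntax API touched here.

    // Illustrative only: `Item` and `print_methods` are made-up names,
    // not the libsyntax types changed by this patch.
    enum Item {
        Trait { name: String, methods: Vec<String> },
        Mod(String),
    }

    fn print_methods(methods: &[String]) {
        for m in methods {
            println!("  method {}", m);
        }
    }

    fn print_item(item: Item) {
        match item {
            // `ref` borrows the fields instead of moving them out of `item`,
            // which is the rewrite applied throughout pprust.rs above.
            Item::Trait { ref name, ref methods } => {
                println!("trait {}", name);
                print_methods(methods);
            }
            Item::Mod(ref name) => println!("mod {}", name),
        }
    }

    fn main() {
        print_item(Item::Trait {
            name: "ToStr".to_string(),
            methods: vec!["to_str".to_string()],
        });
    }

The 0.5-era compiler did not reborrow through `ref` bindings automatically, so each use had to be dereferenced by hand, which is why the diff spells the uses `(*methods)`, `(*body)`, `(*cmnt)`, and so on.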
diff --git a/src/libsyntax/syntax.rc b/src/libsyntax/syntax.rc
index 55bac5f8275..66052767bd4 100644
--- a/src/libsyntax/syntax.rc
+++ b/src/libsyntax/syntax.rc
@@ -24,7 +24,7 @@
 #[allow(vecs_implicitly_copyable)];
 #[allow(non_camel_case_types)];
 #[allow(deprecated_mode)];
-#[allow(deprecated_pattern)];
+#[warn(deprecated_pattern)];
 extern mod core(vers = "0.5");
 extern mod std(vers = "0.5");
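
The single syntax.rc line is what drives the rest of the patch: the crate-level lint for deprecated pattern bindings moves from allow to warn, so any by-value binding left in libsyntax now produces a diagnostic instead of being silently accepted. Crate-level lint control still works the same way in current Rust, only with inner-attribute syntax and no trailing semicolon; `deprecated_pattern` itself no longer exists, so the sketch below uses `unused_variables` purely as a stand-in lint.

    // Crate-level lint control in current syntax. The 2012 crate used outer
    // attributes with a trailing semicolon (`#[warn(deprecated_pattern)];`);
    // today this is an inner attribute at the crate root.
    #![warn(unused_variables)]
    // #![allow(unused_variables)]  // the previous setting: diagnostics suppressed
    // #![deny(unused_variables)]   // a possible later step: diagnostics become errors

    fn main() {
        let unused = 42; // with `warn`, rustc reports this but the build still succeeds
    }

The usual migration path is allow, then warn while the offending code is cleaned up (which is what the rest of this patch does), then deny once the crate stays clean.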
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 6ca735469a6..4a399c5a0dd 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -125,10 +125,10 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
      (v.visit_ty)(t, e, v);
      (v.visit_expr)(ex, e, v);
    }
-    item_fn(decl, purity, tp, body) => {
+    item_fn(decl, purity, tp, ref body) => {
      (v.visit_fn)(fk_item_fn(/* FIXME (#2543) */ copy i.ident,
                              /* FIXME (#2543) */ copy tp,
-                              purity), decl, body,
+                              purity), decl, (*body),
                   i.span, i.id, e, v);
    }
    item_mod(m) => (v.visit_mod)(m, i.span, i.id, e, v),
@@ -140,9 +140,9 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
      (v.visit_ty)(t, e, v);
      (v.visit_ty_params)(tps, e, v);
    }
-    item_enum(enum_definition, tps) => {
+    item_enum(ref enum_definition, tps) => {
      (v.visit_ty_params)(tps, e, v);
-      visit_enum_def(enum_definition, tps, e, v);
+      visit_enum_def((*enum_definition), tps, e, v);
    }
    item_impl(tps, traits, ty, methods) => {
      (v.visit_ty_params)(tps, e, v);
@@ -158,14 +158,14 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
      (v.visit_ty_params)(tps, e, v);
      (v.visit_struct_def)(struct_def, i.ident, tps, i.id, e, v);
    }
-    item_trait(tps, traits, methods) => {
+    item_trait(tps, traits, ref methods) => {
      (v.visit_ty_params)(tps, e, v);
      for traits.each |p| {
        visit_path(p.path, e, v);
      }
-      for methods.each |m| {
+      for (*methods).each |m| {
        (v.visit_trait_method)(*m, e, v);
      }
    }
-    item_mac(m) => visit_mac(m, e, v)
+    item_mac(ref m) => visit_mac((*m), e, v)
  }
 }
@@ -180,8 +180,8 @@ fn visit_enum_def<E>(enum_definition: ast::enum_def, tps: ~[ast::ty_param],
      (v.visit_struct_def)(struct_def, vr.node.name, tps,
                           vr.node.id, e, v);
    }
-    enum_variant_kind(enum_definition) => {
-      visit_enum_def(enum_definition, tps, e, v);
+    enum_variant_kind(ref enum_definition) => {
+      visit_enum_def((*enum_definition), tps, e, v);
    }
  }
  // Visit the disr expr if it exists
@@ -197,7 +197,7 @@ fn visit_ty<E>(t: @Ty, e: E, v: vt<E>) {
    ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => {
      (v.visit_ty)(mt.ty, e, v);
    }
-    ty_rec(flds) => for flds.each |f| {
+    ty_rec(ref flds) => for (*flds).each |f| {
      (v.visit_ty)(f.node.mt.ty, e, v);
    },
    ty_tup(ts) => for ts.each |tt| {
@@ -320,7 +320,7 @@ fn visit_ty_method<E>(m: ty_method, e: E, v: vt<E>) {
 fn visit_trait_method<E>(m: trait_method, e: E, v: vt<E>) {
   match m {
-    required(ty_m) => (v.visit_ty_method)(ty_m, e, v),
+    required(ref ty_m) => (v.visit_ty_method)((*ty_m), e, v),
    provided(m) => visit_method_helper(m, e, v)
  }
 }
@@ -364,7 +364,7 @@ fn visit_stmt<E>(s: @stmt, e: E, v: vt<E>) {
    stmt_decl(d, _) => (v.visit_decl)(d, e, v),
    stmt_expr(ex, _) => (v.visit_expr)(ex, e, v),
    stmt_semi(ex, _) => (v.visit_expr)(ex, e, v),
-    stmt_mac(mac, _) => visit_mac(mac, e, v)
+    stmt_mac(ref mac, _) => visit_mac((*mac), e, v)
  }
 }
@@ -404,13 +404,13 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
      (v.visit_expr)(element, e, v);
      (v.visit_expr)(count, e, v);
    }
-    expr_rec(flds, base) => {
-      for flds.each |f| { (v.visit_expr)(f.node.expr, e, v); }
+    expr_rec(ref flds, base) => {
+      for (*flds).each |f| { (v.visit_expr)(f.node.expr, e, v); }
      visit_expr_opt(base, e, v);
    }
-    expr_struct(p, flds, base) => {
+    expr_struct(p, ref flds, base) => {
      visit_path(p, e, v);
-      for flds.each |f| { (v.visit_expr)(f.node.expr, e, v); }
+      for (*flds).each |f| { (v.visit_expr)(f.node.expr, e, v); }
      visit_expr_opt(base, e, v);
    }
    expr_tup(elts) => for elts.each |el| { (v.visit_expr)(*el, e, v); },
@@ -431,29 +431,29 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
    expr_assert(x) => (v.visit_expr)(x, e, v),
    expr_lit(_) => (),
    expr_cast(x, t) => { (v.visit_expr)(x, e, v); (v.visit_ty)(t, e, v); }
-    expr_if(x, b, eo) => {
+    expr_if(x, ref b, eo) => {
      (v.visit_expr)(x, e, v);
-      (v.visit_block)(b, e, v);
+      (v.visit_block)((*b), e, v);
      visit_expr_opt(eo, e, v);
    }
-    expr_while(x, b) => {
+    expr_while(x, ref b) => {
      (v.visit_expr)(x, e, v);
-      (v.visit_block)(b, e, v);
+      (v.visit_block)((*b), e, v);
    }
-    expr_loop(b, _) => (v.visit_block)(b, e, v),
-    expr_match(x, arms) => {
+    expr_loop(ref b, _) => (v.visit_block)((*b), e, v),
+    expr_match(x, ref arms) => {
      (v.visit_expr)(x, e, v);
-      for arms.each |a| { (v.visit_arm)(*a, e, v); }
+      for (*arms).each |a| { (v.visit_arm)(*a, e, v); }
    }
-    expr_fn(proto, decl, body, cap_clause) => {
-      (v.visit_fn)(fk_anon(proto, cap_clause), decl, body,
+    expr_fn(proto, decl, ref body, cap_clause) => {
+      (v.visit_fn)(fk_anon(proto, cap_clause), decl, (*body),
                   ex.span, ex.id, e, v);
    }
-    expr_fn_block(decl, body, cap_clause) => {
-      (v.visit_fn)(fk_fn_block(cap_clause), decl, body,
+    expr_fn_block(decl, ref body, cap_clause) => {
+      (v.visit_fn)(fk_fn_block(cap_clause), decl, (*body),
                   ex.span, ex.id, e, v);
    }
-    expr_block(b) => (v.visit_block)(b, e, v),
+    expr_block(ref b) => (v.visit_block)((*b), e, v),
    expr_assign(a, b) => {
      (v.visit_expr)(b, e, v);
      (v.visit_expr)(a, e, v);
@@ -482,7 +482,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
      (v.visit_expr)(lv, e, v);
      (v.visit_expr)(x, e, v);
    }
-    expr_mac(mac) => visit_mac(mac, e, v),
+    expr_mac(ref mac) => visit_mac((*mac), e, v),
    expr_paren(x) => (v.visit_expr)(x, e, v),
  }
  (v.visit_expr_post)(ex, e, v);
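
visit.rs gets the same treatment as the pretty-printer: in each arm of the expression walker, blocks, arm lists, and macros are bound with `ref` and handed on as `(*x)` rather than being moved or implicitly copied out of the AST node. Below is a rough modern-Rust sketch of such a recursive walker; the `Expr` and `Block` types are invented for illustration and are not the real libsyntax AST, and today the borrow happens simply by matching on a reference.

    // Toy AST walker in the spirit of visit.rs; names and types are made up.
    enum Expr {
        Lit(i64),
        If(Box<Expr>, Block, Option<Box<Expr>>),
        BlockExpr(Block),
    }

    struct Block {
        exprs: Vec<Expr>,
    }

    fn visit_block(b: &Block, depth: usize) {
        for e in &b.exprs {
            visit_expr(e, depth + 1);
        }
    }

    fn visit_expr(e: &Expr, depth: usize) {
        let pad = "  ".repeat(depth);
        match e {
            Expr::Lit(n) => println!("{}lit {}", pad, n),
            // Blocks are borrowed out of the enum rather than moved, which is
            // what the `ref b` / `(*b)` arms in visit_expr above accomplish.
            Expr::If(cond, then_blk, else_opt) => {
                println!("{}if", pad);
                visit_expr(cond, depth + 1);
                visit_block(then_blk, depth);
                if let Some(els) = else_opt {
                    visit_expr(els, depth + 1);
                }
            }
            Expr::BlockExpr(b) => visit_block(b, depth),
        }
    }

    fn main() {
        let ast = Expr::If(
            Box::new(Expr::Lit(1)),
            Block { exprs: vec![Expr::Lit(2)] },
            None,
        );
        visit_expr(&ast, 0);
    }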
