diff options
| author | Patrick Walton <pcwalton@mimiga.net> | 2012-12-04 10:50:00 -0800 |
|---|---|---|
| committer | Patrick Walton <pcwalton@mimiga.net> | 2012-12-04 14:19:19 -0800 |
| commit | 56ece46f7de9d1703dd39f952afac9bed22633b6 (patch) | |
| tree | ede7f9ee4631628cc8603e692a0a3c1c5a00577e /src/libsyntax/ext | |
| parent | 94be14516968501306f1ed95774a3f227956e809 (diff) | |
| download | rust-56ece46f7de9d1703dd39f952afac9bed22633b6.tar.gz rust-56ece46f7de9d1703dd39f952afac9bed22633b6.zip | |
librustc: Remove all legacy pattern bindings from libsyntax and librustc. rs=refactoring
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/auto_serialize.rs | 16 | ||||
| -rw-r--r-- | src/libsyntax/ext/base.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/env.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 48 | ||||
| -rw-r--r-- | src/libsyntax/ext/fmt.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/pipes/check.rs | 10 | ||||
| -rw-r--r-- | src/libsyntax/ext/pipes/pipec.rs | 12 | ||||
| -rw-r--r-- | src/libsyntax/ext/pipes/proto.rs | 6 | ||||
| -rw-r--r-- | src/libsyntax/ext/qquote.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 16 | ||||
| -rw-r--r-- | src/libsyntax/ext/simplext.rs | 52 | ||||
| -rw-r--r-- | src/libsyntax/ext/source_util.rs | 8 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 26 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 16 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 32 |
15 files changed, 128 insertions(+), 128 deletions(-)
diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs index 1242d528194..a42a51fd302 100644 --- a/src/libsyntax/ext/auto_serialize.rs +++ b/src/libsyntax/ext/auto_serialize.rs @@ -120,12 +120,12 @@ fn expand_auto_serialize( do vec::flat_map(in_items) |item| { if item.attrs.any(is_auto_serialize) { match item.node { - ast::item_ty(@{node: ast::ty_rec(fields), _}, tps) => { + ast::item_ty(@{node: ast::ty_rec(ref fields), _}, tps) => { let ser_impl = mk_rec_ser_impl( cx, item.span, item.ident, - fields, + (*fields), tps ); @@ -142,12 +142,12 @@ fn expand_auto_serialize( ~[filter_attrs(*item), ser_impl] }, - ast::item_enum(enum_def, tps) => { + ast::item_enum(ref enum_def, tps) => { let ser_impl = mk_enum_ser_impl( cx, item.span, item.ident, - enum_def, + (*enum_def), tps ); @@ -184,12 +184,12 @@ fn expand_auto_deserialize( do vec::flat_map(in_items) |item| { if item.attrs.any(is_auto_deserialize) { match item.node { - ast::item_ty(@{node: ast::ty_rec(fields), _}, tps) => { + ast::item_ty(@{node: ast::ty_rec(ref fields), _}, tps) => { let deser_impl = mk_rec_deser_impl( cx, item.span, item.ident, - fields, + (*fields), tps ); @@ -206,12 +206,12 @@ fn expand_auto_deserialize( ~[filter_attrs(*item), deser_impl] }, - ast::item_enum(enum_def, tps) => { + ast::item_enum(ref enum_def, tps) => { let deser_impl = mk_enum_deser_impl( cx, item.span, item.ident, - enum_def, + (*enum_def), tps ); diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 79dbbbe0b72..630ba3b8749 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -202,12 +202,12 @@ fn mk_ctxt(parse_sess: parse::parse_sess, fn mod_path() -> ~[ast::ident] { return self.mod_path; } fn bt_push(ei: codemap::ExpnInfo) { match ei { - ExpandedFrom({call_site: cs, callie: callie}) => { + ExpandedFrom({call_site: cs, callie: ref callie}) => { self.backtrace = Some(@ExpandedFrom({ call_site: span {lo: cs.lo, hi: cs.hi, expn_info: self.backtrace}, - callie: 
callie})); + callie: (*callie)})); } } } diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index 1371cd30308..51db63c819a 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -28,7 +28,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, let var = expr_to_str(cx, args[0], ~"env! requires a string"); match os::getenv(var) { option::None => return mk_uniq_str(cx, sp, ~""), - option::Some(s) => return mk_uniq_str(cx, sp, s) + option::Some(ref s) => return mk_uniq_str(cx, sp, (*s)) } } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index a9fdcc18661..6efca050fa5 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -29,9 +29,9 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, return match e { // expr_mac should really be expr_ext or something; it's the // entry-point for all syntax extensions. - expr_mac(mac) => { + expr_mac(ref mac) => { - match mac.node { + match (*mac).node { // Old-style macros. For compatibility, will erase this whole // block once we've transitioned. 
mac_invoc(pth, args, body) => { @@ -50,7 +50,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, fmt!("%s can only be used as a decorator", *extname)); } Some(normal({expander: exp, span: exp_sp})) => { - let expanded = exp(cx, mac.span, args, body); + let expanded = exp(cx, (*mac).span, args, body); cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); @@ -61,7 +61,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, (fully_expanded, s) } Some(macro_defining(ext)) => { - let named_extension = ext(cx, mac.span, args, body); + let named_extension = ext(cx, (*mac).span, args, body); exts.insert(named_extension.name, named_extension.ext); (ast::expr_rec(~[], None), s) } @@ -79,7 +79,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, // Token-tree macros, these will be the only case when we're // finished transitioning. - mac_invoc_tt(pth, tts) => { + mac_invoc_tt(pth, ref tts) => { assert (vec::len(pth.idents) == 1u); /* using idents and token::special_idents would make the the macro names be hygienic */ @@ -90,7 +90,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, fmt!("macro undefined: '%s'", *extname)) } Some(normal_tt({expander: exp, span: exp_sp})) => { - let expanded = match exp(cx, mac.span, tts) { + let expanded = match exp(cx, (*mac).span, (*tts)) { mr_expr(e) => e, mr_any(expr_maker,_,_) => expr_maker(), _ => cx.span_fatal( @@ -109,8 +109,8 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, Some(normal({expander: exp, span: exp_sp})) => { //convert the new-style invoc for the old-style macro let arg = base::tt_args_to_original_flavor(cx, pth.span, - tts); - let expanded = exp(cx, mac.span, arg, None); + (*tts)); + let expanded = exp(cx, (*mac).span, arg, None); cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); @@ -128,7 +128,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: 
ext_ctxt, } } - _ => cx.span_bug(mac.span, ~"naked syntactic bit") + _ => cx.span_bug((*mac).span, ~"naked syntactic bit") } } _ => orig(e, s, fld) @@ -158,9 +158,9 @@ fn expand_mod_items(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, let new_items = do vec::flat_map(module_.items) |item| { do vec::foldr(item.attrs, ~[*item]) |attr, items| { let mname = match attr.node.value.node { - ast::meta_word(n) => n, - ast::meta_name_value(n, _) => n, - ast::meta_list(n, _) => n + ast::meta_word(ref n) => (*n), + ast::meta_name_value(ref n, _) => (*n), + ast::meta_list(ref n, _) => (*n) }; match exts.find(mname) { None | Some(normal(_)) | Some(macro_defining(_)) @@ -227,10 +227,10 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, &&it: @ast::item, fld: ast_fold) -> Option<@ast::item> { let (pth, tts) = biased_match!( - (it.node) ~ (item_mac({node: mac_invoc_tt(pth, tts), _})) else { + (it.node) ~ (item_mac({node: mac_invoc_tt(pth, ref tts), _})) else { cx.span_bug(it.span, ~"invalid item macro invocation") }; - => (pth, tts) + => (pth, (*tts)) ); let extname = cx.parse_sess().interner.get(pth.idents[0]); @@ -238,22 +238,22 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, None => cx.span_fatal(pth.span, fmt!("macro undefined: '%s!'", *extname)), - Some(normal_tt(expand)) => { + Some(normal_tt(ref expand)) => { if it.ident != parse::token::special_idents::invalid { cx.span_fatal(pth.span, fmt!("macro %s! expects no ident argument, \ given '%s'", *extname, *cx.parse_sess().interner.get(it.ident))); } - ((expand.expander)(cx, it.span, tts), expand.span) + (((*expand).expander)(cx, it.span, tts), (*expand).span) } - Some(item_tt(expand)) => { + Some(item_tt(ref expand)) => { if it.ident == parse::token::special_idents::invalid { cx.span_fatal(pth.span, fmt!("macro %s! 
expects an ident argument", *extname)); } - ((expand.expander)(cx, it.span, it.ident, tts), expand.span) + (((*expand).expander)(cx, it.span, it.ident, tts), (*expand).span) } _ => cx.span_fatal( it.span, fmt!("%s! is not legal in item position", *extname)) @@ -268,8 +268,8 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, + *extname), mr_any(_, item_maker, _) => option::chain(item_maker(), |i| {fld.fold_item(i)}), - mr_def(mdef) => { - exts.insert(mdef.name, mdef.ext); + mr_def(ref mdef) => { + exts.insert((*mdef).name, (*mdef).ext); None } }; @@ -283,11 +283,11 @@ fn expand_stmt(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, -> (stmt_, span) { let (mac, pth, tts, semi) = biased_match! ( - (s) ~ (stmt_mac(mac, semi)) else return orig(s, sp, fld); - (mac.node) ~ (mac_invoc_tt(pth, tts)) else { - cx.span_bug(mac.span, ~"naked syntactic bit") + (s) ~ (stmt_mac(ref mac, semi)) else return orig(s, sp, fld); + ((*mac).node) ~ (mac_invoc_tt(pth, ref tts)) else { + cx.span_bug((*mac).span, ~"naked syntactic bit") }; - => (mac, pth, tts, semi)); + => ((*mac), pth, (*tts), semi)); assert(vec::len(pth.idents) == 1u); let extname = cx.parse_sess().interner.get(pth.idents[0]); diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index d6ea6791284..e0d3bd03f42 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -255,8 +255,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, let nargs = args.len(); for pieces.each |pc| { match *pc { - PieceString(s) => { - piece_exprs.push(mk_uniq_str(cx, fmt_sp, s)) + PieceString(ref s) => { + piece_exprs.push(mk_uniq_str(cx, fmt_sp, (*s))) } PieceConv(conv) => { n += 1u; diff --git a/src/libsyntax/ext/pipes/check.rs b/src/libsyntax/ext/pipes/check.rs index cfe4a3d19ac..cd76655fef6 100644 --- a/src/libsyntax/ext/pipes/check.rs +++ b/src/libsyntax/ext/pipes/check.rs @@ -50,18 +50,18 @@ impl ext_ctxt: proto::visitor<(), (), ()> { fn visit_message(name: ~str, _span: span, _tys: &[@ast::Ty], this: state, 
next: next_state) { match next { - Some({state: next, tys: next_tys}) => { + Some({state: ref next, tys: next_tys}) => { let proto = this.proto; - if !proto.has_state(next) { + if !proto.has_state((*next)) { // This should be a span fatal, but then we need to // track span information. self.span_err( - proto.get_state(next).span, + proto.get_state((*next)).span, fmt!("message %s steps to undefined state, %s", - name, next)); + name, (*next))); } else { - let next = proto.get_state(next); + let next = proto.get_state((*next)); if next.ty_params.len() != next_tys.len() { self.span_err( diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 87db2b1cf63..1c4dd197105 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -55,10 +55,10 @@ impl message: gen_send { fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item { debug!("pipec: gen_send"); match self { - message(_id, span, tys, this, - Some({state: next, tys: next_tys})) => { + message(ref _id, span, tys, this, + Some({state: ref next, tys: next_tys})) => { debug!("pipec: next state exists"); - let next = this.proto.get_state(next); + let next = this.proto.get_state((*next)); assert next_tys.len() == next.ty_params.len(); let arg_names = tys.mapi(|i, _ty| cx.ident_of(~"x_"+i.to_str())); @@ -139,7 +139,7 @@ impl message: gen_send { cx.expr_block(body)) } - message(_id, span, tys, this, None) => { + message(ref _id, span, tys, this, None) => { debug!("pipec: no next state"); let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str())); @@ -220,8 +220,8 @@ impl state: to_type_decls { let message(name, span, tys, this, next) = *m; let tys = match next { - Some({state: next, tys: next_tys}) => { - let next = this.proto.get_state(next); + Some({state: ref next, tys: next_tys}) => { + let next = this.proto.get_state((*next)); let next_name = cx.str_of(next.data_name()); let dir = match this.dir { diff --git a/src/libsyntax/ext/pipes/proto.rs 
b/src/libsyntax/ext/pipes/proto.rs index f8c4648dd4a..af75c9e71dc 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -55,7 +55,7 @@ enum message { impl message { fn name() -> ~str { match self { - message(id, _, _, _, _) => id + message(ref id, _, _, _, _) => (*id) } } @@ -113,8 +113,8 @@ impl state { fn reachable(f: fn(state) -> bool) { for self.messages.each |m| { match *m { - message(_, _, _, _, Some({state: id, _})) => { - let state = self.proto.get_state(id); + message(_, _, _, _, Some({state: ref id, _})) => { + let state = self.proto.get_state((*id)); if !f(state) { break } } _ => () diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index 346798c9029..e13dfe750b7 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -69,7 +69,7 @@ impl @ast::expr: qq_helper { fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_expr(self, cx, v);} fn extract_mac() -> Option<ast::mac_> { match (self.node) { - ast::expr_mac({node: mac, _}) => Some(mac), + ast::expr_mac({node: ref mac, _}) => Some((*mac)), _ => None } } @@ -84,7 +84,7 @@ impl @ast::Ty: qq_helper { fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);} fn extract_mac() -> Option<ast::mac_> { match (self.node) { - ast::ty_mac({node: mac, _}) => Some(mac), + ast::ty_mac({node: ref mac, _}) => Some((*mac)), _ => None } } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 27760e4117f..b2e651c7e33 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -104,15 +104,15 @@ fn mk_span(cx: ext_ctxt, qsp: span, sp: span) -> @ast::expr { let e_expn_info = match sp.expn_info { None => build::mk_path(cx, qsp, ids_ext(cx, ~[~"None"])), - Some(@codemap::ExpandedFrom(cr)) => { + Some(@codemap::ExpandedFrom(ref cr)) => { let e_callee = build::mk_rec_e( cx, qsp, ~[{ident: id_ext(cx, ~"name"), ex: build::mk_uniq_str(cx, qsp, - cr.callie.name)}, + (*cr).callie.name)}, {ident: id_ext(cx, ~"span"), - ex: 
mk_option_span(cx, qsp, cr.callie.span)}]); + ex: mk_option_span(cx, qsp, (*cr).callie.span)}]); let e_expn_info_ = build::mk_call( @@ -121,7 +121,7 @@ fn mk_span(cx: ext_ctxt, qsp: span, sp: span) -> @ast::expr { ~[build::mk_rec_e( cx, qsp, ~[{ident: id_ext(cx, ~"call_site"), - ex: mk_span(cx, qsp, cr.call_site)}, + ex: mk_span(cx, qsp, (*cr).call_site)}, {ident: id_ext(cx, ~"callie"), ex: e_callee}])]); @@ -327,20 +327,20 @@ fn mk_token(cx: ext_ctxt, sp: span, tok: token::Token) -> @ast::expr { fn mk_tt(cx: ext_ctxt, sp: span, tt: &ast::token_tree) -> @ast::expr { match *tt { - ast::tt_tok(sp, tok) => { + ast::tt_tok(sp, ref tok) => { let e_tok = build::mk_call(cx, sp, ids_ext(cx, ~[~"tt_tok"]), ~[mk_span(cx, sp, sp), - mk_token(cx, sp, tok)]); + mk_token(cx, sp, (*tok))]); build::mk_uniq_vec_e(cx, sp, ~[e_tok]) } - ast::tt_delim(tts) => { + ast::tt_delim(ref tts) => { let e_delim = build::mk_call(cx, sp, ids_ext(cx, ~[~"tt_delim"]), - ~[mk_tts(cx, sp, tts)]); + ~[mk_tts(cx, sp, (*tts))]); build::mk_uniq_vec_e(cx, sp, ~[e_delim]) } diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index 5e47dee548f..1bf24670aab 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -55,8 +55,8 @@ fn match_error(cx: ext_ctxt, m: matchable, expected: ~str) -> ! 
{ x.span, ~"this argument is an ident, expected " + expected), match_ty(x) => cx.span_fatal( x.span, ~"this argument is a type, expected " + expected), - match_block(x) => cx.span_fatal( - x.span, ~"this argument is a block, expected " + expected), + match_block(ref x) => cx.span_fatal( + (*x).span, ~"this argument is a block, expected " + expected), match_exact => cx.bug(~"what is a match_exact doing in a bindings?") } } @@ -76,10 +76,10 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) -> let mut res = None; for elts.each |elt| { match elt.node { - expr_mac(m) => match m.node { + expr_mac(ref m) => match (*m).node { ast::mac_ellipsis => { if res.is_some() { - cx.span_fatal(m.span, ~"only one ellipsis allowed"); + cx.span_fatal((*m).span, ~"only one ellipsis allowed"); } res = Some({pre: vec::slice(elts, 0u, idx - 1u), @@ -104,7 +104,7 @@ fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) -> for v.each |elem| { match f(*elem) { None => return None, - Some(fv) => res.push(fv) + Some(ref fv) => res.push((*fv)) } } return Some(res); @@ -112,7 +112,7 @@ fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) -> fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result { match ad { - leaf(x) => return f(x), + leaf(ref x) => return f((*x)), seq(ads, span) => match option_flatten_map(|x| a_d_map(x, f), *ads) { None => return None, Some(ts) => return Some(seq(@ts, span)) @@ -124,7 +124,7 @@ fn compose_sels(s1: selector, s2: selector) -> selector { fn scomp(s1: selector, s2: selector, m: matchable) -> match_result { return match s1(m) { None => None, - Some(matches) => a_d_map(matches, s2) + Some(ref matches) => a_d_map((*matches), s2) } } return { |x| scomp(s1, s2, x) }; @@ -172,7 +172,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> Option<bindings> { for b.real_binders.each |key, val| { match val(match_expr(e)) { None => never_mind = true, - Some(mtc) => { res.insert(key, mtc); } + Some(ref mtc) => { res.insert(key, 
(*mtc)); } } }; //HACK: `ret` doesn't work in `for each` @@ -231,14 +231,14 @@ fn follow_for_trans(cx: ext_ctxt, mmaybe: Option<arb_depth<matchable>>, idx_path: @mut ~[uint]) -> Option<matchable> { match mmaybe { None => return None, - Some(m) => { - return match follow(m, *idx_path) { + Some(ref m) => { + return match follow((*m), *idx_path) { seq(_, sp) => { cx.span_fatal(sp, ~"syntax matched under ... but not " + ~"used that way.") } - leaf(m) => return Some(m) + leaf(ref m) => return Some((*m)) } } } @@ -337,7 +337,7 @@ fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], &&i: ident, _fld: ast_fold) -> ident { return match follow_for_trans(cx, b.find(i), idx_path) { Some(match_ident(a_id)) => a_id.node, - Some(m) => match_error(cx, m, ~"an identifier"), + Some(ref m) => match_error(cx, (*m), ~"an identifier"), None => i } } @@ -353,7 +353,7 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], rp: None, types: ~[]} } Some(match_path(a_pth)) => *a_pth, - Some(m) => match_error(cx, m, ~"a path"), + Some(ref m) => match_error(cx, (*m), ~"a path"), None => p } } @@ -380,7 +380,7 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], } Some(match_path(a_pth)) => (expr_path(a_pth), s), Some(match_expr(a_exp)) => (a_exp.node, a_exp.span), - Some(m) => match_error(cx, m, ~"an expression"), + Some(ref m) => match_error(cx, (*m), ~"an expression"), None => orig(e, s, fld) } } @@ -399,7 +399,7 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], Some(id) => { match follow_for_trans(cx, b.find(id), idx_path) { Some(match_ty(ty)) => (ty.node, ty.span), - Some(m) => match_error(cx, m, ~"a type"), + Some(ref m) => match_error(cx, (*m), ~"a type"), None => orig(t, s, fld) } } @@ -422,10 +422,10 @@ fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], return match block_to_ident(blk) { Some(id) => { match follow_for_trans(cx, b.find(id), idx_path) { - Some(match_block(new_blk)) => 
(new_blk.node, new_blk.span), + Some(match_block(ref new_blk)) => ((*new_blk).node, (*new_blk).span), // possibly allow promotion of ident/path/expr to blocks? - Some(m) => match_error(cx, m, ~"a block"), + Some(ref m) => match_error(cx, (*m), ~"a block"), None => orig(blk, s, fld) } } @@ -468,8 +468,8 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) { } } /* FIXME (#2251): handle embedded types and blocks, at least */ - expr_mac(mac) => { - p_t_s_r_mac(cx, mac, s, b); + expr_mac(ref mac) => { + p_t_s_r_mac(cx, (*mac), s, b); } _ => { fn select(cx: ext_ctxt, m: matchable, pat: @expr) -> @@ -548,7 +548,7 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, _s: selector, _b: binders) { fn_m: fn(ast::mac) -> match_result) -> match_result { return match m { match_expr(e) => match e.node { - expr_mac(mac) => fn_m(mac), + expr_mac(ref mac) => fn_m((*mac)), _ => None }, _ => cx.bug(~"broken traversal in p_t_s_r") @@ -659,15 +659,15 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, match elts[0u].node { - expr_mac(mac) => { - match mac.node { + expr_mac(ref mac) => { + match (*mac).node { mac_invoc(pth, invoc_arg, _) => { match path_to_ident(pth) { Some(id) => { let id_str = cx.str_of(id); match macro_name { None => macro_name = Some(id_str), - Some(other_id) => if id_str != other_id { + Some(ref other_id) => if id_str != (*other_id) { cx.span_fatal(pth.span, ~"macro name must be " + ~"consistent"); @@ -679,7 +679,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, } let arg = match invoc_arg { Some(arg) => arg, - None => cx.span_fatal(mac.span, + None => cx.span_fatal((*mac).span, ~"macro must have arguments") }; clauses.push(@{params: pattern_to_selectors(cx, arg), @@ -689,7 +689,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, // the macro arg situation) } _ => { - cx.span_bug(mac.span, ~"undocumented invariant in \ + cx.span_bug((*mac).span, ~"undocumented invariant in \ add_extension"); } } @@ -712,7 
+712,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, return {name: match macro_name { - Some(id) => id, + Some(ref id) => (*id), None => cx.span_fatal(sp, ~"macro definition must have " + ~"at least one clause") }, diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 0e1eb2ee2df..aa97646c054 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -83,8 +83,8 @@ fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file))); match res { result::Ok(_) => { /* Continue. */ } - result::Err(e) => { - cx.parse_sess().span_diagnostic.handler().fatal(e); + result::Err(ref e) => { + cx.parse_sess().span_diagnostic.handler().fatal((*e)); } } @@ -104,8 +104,8 @@ fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, }); return mk_base_vec_e(cx, sp, u8_exprs); } - result::Err(e) => { - cx.parse_sess().span_diagnostic.handler().fatal(e) + result::Err(ref e) => { + cx.parse_sess().span_diagnostic.handler().fatal((*e)) } } } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 44a3774ddd0..e51800b8a61 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -117,8 +117,8 @@ type matcher_pos = ~{ }; fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos { - match mpu { - matcher_pos_up(Some(mp)) => copy mp, + match &mpu { + &matcher_pos_up(Some(ref mp)) => copy (*mp), _ => fail } } @@ -127,7 +127,7 @@ fn count_names(ms: &[matcher]) -> uint { vec::foldl(0u, ms, |ct, m| { ct + match m.node { match_tok(_) => 0u, - match_seq(more_ms, _, _, _, _) => count_names(more_ms), + match_seq(ref more_ms, _, _, _, _) => count_names((*more_ms)), match_nonterminal(_,_,_) => 1u }}) } @@ -184,8 +184,8 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match]) ret_val: HashMap<ident, @named_match>) { match m { {node: match_tok(_), 
span: _} => (), - {node: match_seq(more_ms, _, _, _, _), span: _} => { - for more_ms.each() |next_m| { n_rec(p_s, *next_m, res, ret_val) }; + {node: match_seq(ref more_ms, _, _, _, _), span: _} => { + for (*more_ms).each() |next_m| { n_rec(p_s, *next_m, res, ret_val) }; } {node: match_nonterminal(bind_name, _, idx), span: sp} => { if ret_val.contains_key(bind_name) { @@ -211,8 +211,8 @@ fn parse_or_else(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) -> HashMap<ident, @named_match> { match parse(sess, cfg, rdr, ms) { success(m) => m, - failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str), - error(sp, str) => sess.span_diagnostic.span_fatal(sp, str) + failure(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)), + error(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)) } } @@ -274,8 +274,8 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) // the *_t vars are workarounds for the lack of unary move match copy ei.sep { - Some(t) if idx == len => { // we need a separator - if tok == t { //pass the separator + Some(ref t) if idx == len => { // we need a separator + if tok == (*t) { //pass the separator let ei_t = move ei; ei_t.idx += 1; next_eis.push(move ei_t); @@ -293,7 +293,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) } else { match copy ei.elts[idx].node { /* need to descend into sequence */ - match_seq(matchers, sep, zero_ok, + match_seq(ref matchers, ref sep, zero_ok, match_idx_lo, match_idx_hi) => { if zero_ok { let new_ei = copy ei; @@ -310,7 +310,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) |_m| DVec::<@named_match>()); let ei_t = move ei; cur_eis.push(~{ - elts: matchers, sep: sep, mut idx: 0u, + elts: (*matchers), sep: (*sep), mut idx: 0u, mut up: matcher_pos_up(Some(move ei_t)), matches: move matches, match_lo: match_idx_lo, match_hi: match_idx_hi, @@ -318,9 +318,9 @@ fn parse(sess: parse_sess, cfg: 
ast::crate_cfg, rdr: reader, ms: ~[matcher]) }); } match_nonterminal(_,_,_) => { bb_eis.push(move ei) } - match_tok(t) => { + match_tok(ref t) => { let ei_t = move ei; - if t == tok { + if (*t) == tok { ei_t.idx += 1; next_eis.push(move ei_t); } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 0767a3cce83..09415703260 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -84,17 +84,17 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, for lhses.eachi() |i, lhs| { // try each arm's matchers match *lhs { - @matched_nonterminal(nt_matchers(mtcs)) => { + @matched_nonterminal(nt_matchers(ref mtcs)) => { // `none` is because we're not interpolating let arg_rdr = new_tt_reader(s_d, itr, None, arg) as reader; - match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs) { + match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) { success(named_matches) => { let rhs = match rhses[i] { // okay, what's your transcriber? 
- @matched_nonterminal(nt_tt(@tt)) => { - match tt { + @matched_nonterminal(nt_tt(@ref tt)) => { + match (*tt) { // cut off delimiters; don't parse 'em - tt_delim(tts) => tts.slice(1u,tts.len()-1u), + tt_delim(ref tts) => (*tts).slice(1u,(*tts).len()-1u), _ => cx.span_fatal( sp, ~"macro rhs must be delimited") } @@ -113,11 +113,11 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, || p.parse_item(~[/* no attrs*/]), || p.parse_stmt(~[/* no attrs*/])); } - failure(sp, msg) => if sp.lo >= best_fail_spot.lo { + failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo { best_fail_spot = sp; - best_fail_msg = msg; + best_fail_msg = (*msg); }, - error(sp, msg) => cx.span_fatal(sp, msg) + error(sp, ref msg) => cx.span_fatal(sp, (*msg)) } } _ => cx.bug(~"non-matcher found in parsed lhses") diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index eeb8b068b5b..3d901039188 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -130,8 +130,8 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis { } } match t { - tt_delim(tts) | tt_seq(_, tts, _, _) => { - vec::foldl(lis_unconstrained, tts, |lis, tt| + tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => { + vec::foldl(lis_unconstrained, (*tts), |lis, tt| lis_merge(lis, lockstep_iter_size(*tt, r), r)) } tt_tok(*) => lis_unconstrained, @@ -170,8 +170,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { r.cur.idx = 0u; r.repeat_idx[r.repeat_idx.len() - 1u] += 1u; match r.cur.sep { - Some(tk) => { - r.cur_tok = tk; /* repeat same span, I guess */ + Some(ref tk) => { + r.cur_tok = (*tk); /* repeat same span, I guess */ return ret_val; } None => () @@ -181,27 +181,27 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { loop { /* because it's easiest, this handles `tt_delim` not starting with a `tt_tok`, even though it won't happen */ match r.cur.readme[r.cur.idx] { - tt_delim(tts) => { - r.cur = @{readme: tts, mut idx: 0u, 
dotdotdoted: false, + tt_delim(ref tts) => { + r.cur = @{readme: (*tts), mut idx: 0u, dotdotdoted: false, sep: None, up: tt_frame_up(option::Some(r.cur)) }; // if this could be 0-length, we'd need to potentially recur here } - tt_tok(sp, tok) => { - r.cur_span = sp; r.cur_tok = tok; + tt_tok(sp, ref tok) => { + r.cur_span = sp; r.cur_tok = (*tok); r.cur.idx += 1u; return ret_val; } - tt_seq(sp, tts, sep, zerok) => { - match lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) { + tt_seq(sp, ref tts, ref sep, zerok) => { + match lockstep_iter_size(tt_seq(sp, (*tts), (*sep), zerok), r) { lis_unconstrained => { r.sp_diag.span_fatal( sp, /* blame macro writer */ ~"attempted to repeat an expression containing no syntax \ variables matched as repeating at this depth"); } - lis_contradiction(msg) => { /* FIXME #2887 blame macro invoker + lis_contradiction(ref msg) => { /* FIXME #2887 blame macro invoker instead*/ - r.sp_diag.span_fatal(sp, msg); + r.sp_diag.span_fatal(sp, (*msg)); } lis_constraint(len, _) => { if len == 0 { @@ -217,8 +217,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { } else { r.repeat_len.push(len); r.repeat_idx.push(0u); - r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: true, - sep: sep, up: tt_frame_up(option::Some(r.cur))}; + r.cur = @{readme: (*tts), mut idx: 0u, dotdotdoted: true, + sep: (*sep), up: tt_frame_up(option::Some(r.cur))}; } } } @@ -234,8 +234,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { r.cur.idx += 1u; return ret_val; } - matched_nonterminal(other_whole_nt) => { - r.cur_span = sp; r.cur_tok = INTERPOLATED(other_whole_nt); + matched_nonterminal(ref other_whole_nt) => { + r.cur_span = sp; r.cur_tok = INTERPOLATED((*other_whole_nt)); r.cur.idx += 1u; return ret_val; } |
