From 92d39fe4d5e5ad3d2c2dcafe45eaf6e23edddfd7 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Sun, 12 May 2013 00:25:31 -0400 Subject: syntax: Remove #[allow(vecs_implicitly_copyable)] --- src/libsyntax/ext/tt/macro_parser.rs | 41 ++++++++++++++++++------------------ src/libsyntax/ext/tt/macro_rules.rs | 12 +++++------ src/libsyntax/ext/tt/transcribe.rs | 14 ++++++------ 3 files changed, 33 insertions(+), 34 deletions(-) (limited to 'src/libsyntax/ext/tt') diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 46b09aca8b2..aa211973f1c 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -130,7 +130,6 @@ pub fn count_names(ms: &[matcher]) -> uint { }}) } -#[allow(non_implicitly_copyable_typarams)] pub fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: BytePos) -> ~MatcherPos { let mut match_idx_hi = 0u; @@ -184,15 +183,15 @@ pub enum named_match { pub type earley_item = ~MatcherPos; -pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match]) +pub fn nameize(p_s: @mut ParseSess, ms: &[matcher], res: &[@named_match]) -> HashMap<ident, @named_match> { - fn n_rec(p_s: @mut ParseSess, m: matcher, res: ~[@named_match], + fn n_rec(p_s: @mut ParseSess, m: &matcher, res: &[@named_match], ret_val: &mut HashMap<ident, @named_match>) { - match m { + match *m { codemap::spanned {node: match_tok(_), _} => (), codemap::spanned {node: match_seq(ref more_ms, _, _, _, _), _} => { - for (*more_ms).each() |next_m| { - n_rec(p_s, *next_m, res, ret_val) + for more_ms.each |next_m| { + n_rec(p_s, next_m, res, ret_val) }; } codemap::spanned { @@ -207,7 +206,7 @@ pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match]) } } let mut ret_val = HashMap::new(); - for ms.each() |m| { n_rec(p_s, *m, res, &mut ret_val) } + for ms.each |m| { n_rec(p_s, m, res, &mut ret_val) } return ret_val; } @@ -234,10 +233,10 @@ pub fn parse( sess: @mut ParseSess, cfg: ast::crate_cfg, rdr: @reader, - ms: ~[matcher] + ms: &[matcher] ) 
-> parse_result { let mut cur_eis = ~[]; - cur_eis.push(initial_matcher_pos(copy ms, None, rdr.peek().sp.lo)); + cur_eis.push(initial_matcher_pos(ms.to_owned(), None, rdr.peek().sp.lo)); loop { let mut bb_eis = ~[]; // black-box parsed by parser.rs @@ -277,7 +276,7 @@ pub fn parse( // Only touch the binders we have actually bound for uint::range(ei.match_lo, ei.match_hi) |idx| { - let sub = ei.matches[idx]; + let sub = copy ei.matches[idx]; new_pos.matches[idx] .push(@matched_seq(sub, mk_sp(ei.sp_lo, @@ -410,31 +409,31 @@ pub fn parse( } } -pub fn parse_nt(p: &Parser, name: ~str) -> nonterminal { +pub fn parse_nt(p: &Parser, name: &str) -> nonterminal { match name { - ~"item" => match p.parse_item(~[]) { + "item" => match p.parse_item(~[]) { Some(i) => token::nt_item(i), None => p.fatal(~"expected an item keyword") }, - ~"block" => token::nt_block(p.parse_block()), - ~"stmt" => token::nt_stmt(p.parse_stmt(~[])), - ~"pat" => token::nt_pat(p.parse_pat(true)), - ~"expr" => token::nt_expr(p.parse_expr()), - ~"ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)), + "block" => token::nt_block(p.parse_block()), + "stmt" => token::nt_stmt(p.parse_stmt(~[])), + "pat" => token::nt_pat(p.parse_pat(true)), + "expr" => token::nt_expr(p.parse_expr()), + "ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)), // this could be handled like a token, since it is one - ~"ident" => match *p.token { + "ident" => match *p.token { token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) } _ => p.fatal(~"expected ident, found " + token::to_str(p.reader.interner(), &copy *p.token)) }, - ~"path" => token::nt_path(p.parse_path_with_tps(false)), - ~"tt" => { + "path" => token::nt_path(p.parse_path_with_tps(false)), + "tt" => { *p.quote_depth += 1u; //but in theory, non-quoted tts might be useful let res = token::nt_tt(@p.parse_token_tree()); *p.quote_depth -= 1u; res } - ~"matchers" => token::nt_matchers(p.parse_matchers()), + "matchers" => 
token::nt_matchers(p.parse_matchers()), _ => p.fatal(~"Unsupported builtin nonterminal parser: " + name) } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 169652b1120..be6cc7a846a 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -63,19 +63,19 @@ pub fn add_new_extension(cx: @ext_ctxt, // Extract the arguments: let lhses = match *argument_map.get(&lhs_nm) { - @matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s, + @matched_seq(ref s, _) => /* FIXME (#2543) */ @copy *s, _ => cx.span_bug(sp, ~"wrong-structured lhs") }; let rhses = match *argument_map.get(&rhs_nm) { - @matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s, + @matched_seq(ref s, _) => /* FIXME (#2543) */ @copy *s, _ => cx.span_bug(sp, ~"wrong-structured rhs") }; // Given `lhses` and `rhses`, this is the new macro we create fn generic_extension(cx: @ext_ctxt, sp: span, name: ident, arg: &[ast::token_tree], - lhses: ~[@named_match], rhses: ~[@named_match]) + lhses: &[@named_match], rhses: &[@named_match]) -> MacResult { if cx.trace_macros() { @@ -93,7 +93,7 @@ pub fn add_new_extension(cx: @ext_ctxt, let s_d = cx.parse_sess().span_diagnostic; let itr = cx.parse_sess().interner; - for lhses.eachi() |i, lhs| { // try each arm's matchers + for lhses.eachi |i, lhs| { // try each arm's matchers match *lhs { @matched_nonterminal(nt_matchers(ref mtcs)) => { // `none` is because we're not interpolating @@ -103,7 +103,7 @@ pub fn add_new_extension(cx: @ext_ctxt, None, vec::to_owned(arg) ) as @reader; - match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) { + match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) { success(named_matches) => { let rhs = match rhses[i] { // okay, what's your transcriber? 
@@ -146,7 +146,7 @@ pub fn add_new_extension(cx: @ext_ctxt, } let exp: @fn(@ext_ctxt, span, &[ast::token_tree]) -> MacResult = - |cx, sp, arg| generic_extension(cx, sp, name, arg, lhses, rhses); + |cx, sp, arg| generic_extension(cx, sp, name, arg, *lhses, *rhses); return MRDef(MacroDef{ name: copy *cx.parse_sess().interner.get(name), diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 19c83e21a86..438efb2326c 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -91,11 +91,11 @@ pub fn dup_tt_reader(r: @mut TtReader) -> @mut TtReader { sp_diag: r.sp_diag, interner: r.interner, stack: dup_tt_frame(r.stack), - interpolations: r.interpolations, repeat_idx: copy r.repeat_idx, repeat_len: copy r.repeat_len, cur_tok: copy r.cur_tok, - cur_span: r.cur_span + cur_span: r.cur_span, + interpolations: copy r.interpolations, } } @@ -127,7 +127,7 @@ enum lis { lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str) } -fn lockstep_iter_size(t: token_tree, r: &mut TtReader) -> lis { +fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis { fn lis_merge(lhs: lis, rhs: lis, r: &mut TtReader) -> lis { match lhs { lis_unconstrained => copy rhs, @@ -146,10 +146,10 @@ fn lockstep_iter_size(t: token_tree, r: &mut TtReader) -> lis { } } } - match t { + match *t { tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => { - vec::foldl(lis_unconstrained, (*tts), |lis, tt| { - let lis2 = lockstep_iter_size(*tt, r); + vec::foldl(lis_unconstrained, *tts, |lis, tt| { + let lis2 = lockstep_iter_size(tt, r); lis_merge(lis, lis2, r) }) } @@ -230,7 +230,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } tt_seq(sp, copy tts, copy sep, zerok) => { let t = tt_seq(sp, copy tts, copy sep, zerok); - match lockstep_iter_size(t, r) { + match lockstep_iter_size(&t, r) { lis_unconstrained => { r.sp_diag.span_fatal( sp, /* blame macro writer */ -- cgit 1.4.1-3-g733a5