| author | Brian Anderson <banderson@mozilla.com> | 2012-06-30 16:19:07 -0700 |
|---|---|---|
| committer | Brian Anderson <banderson@mozilla.com> | 2012-07-01 19:19:32 -0700 |
| commit | d1fc2b5995fdef69fe1dbdbba3703398c0aa547b (patch) | |
| tree | b6d14fd0e920512a29c64a671cc2a5bb90132abb /src/libsyntax/ext | |
| parent | 13a8f545388929a07af110a950441e6a1b88473a (diff) | |
| download | rust-d1fc2b5995fdef69fe1dbdbba3703398c0aa547b.tar.gz rust-d1fc2b5995fdef69fe1dbdbba3703398c0aa547b.zip | |
Convert to new closure syntax
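The patch is mechanical: every closure literal written in the old `{|args| body}` form becomes the new `|args| { body }` form (with the braces dropped entirely for single-expression closures), and `do`/`for` calls move the parameter list out in front of the block, e.g. `do vec::map(tys) {|ty| ... }` becomes `do vec::map(tys) |ty| { ... }`. The 2012-era APIs touched here (`vec::map`, `do` expressions, `~[]` vectors) no longer exist in current Rust; purely as an illustration of the resulting closure style, a minimal modern-Rust sketch (not code from this patch):

```rust
fn main() {
    let tys = vec!["int", "uint", "str"];

    // New-style closure: the parameter list sits outside the braces,
    // `|ty| { ... }`. The pre-commit syntax put it inside: `{|ty| ... }`.
    // (Modern-Rust analogue for illustration only; the patch itself edits
    // 2012-era libsyntax code.)
    let prefixed: Vec<String> = tys
        .iter()
        .map(|ty| {
            format!("__{}", ty)
        })
        .collect();

    assert_eq!(prefixed, ["__int", "__uint", "__str"]);
}
```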
Diffstat (limited to 'src/libsyntax/ext')
| mode | file | changed lines |
|---|---|---|
| -rw-r--r-- | src/libsyntax/ext/auto_serialize.rs | 82 |
| -rw-r--r-- | src/libsyntax/ext/build.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/concat_idents.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/earley_parser.rs | 12 |
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 12 |
| -rw-r--r-- | src/libsyntax/ext/fmt.rs | 8 |
| -rw-r--r-- | src/libsyntax/ext/log_syntax.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/qquote.rs | 37 |
| -rw-r--r-- | src/libsyntax/ext/simplext.rs | 64 |
| -rw-r--r-- | src/libsyntax/ext/source_util.rs | 4 |
10 files changed, 109 insertions, 116 deletions
diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs index 487a435df46..d2d685f8f7d 100644 --- a/src/libsyntax/ext/auto_serialize.rs +++ b/src/libsyntax/ext/auto_serialize.rs @@ -100,7 +100,7 @@ fn expand(cx: ext_ctxt, with *item} } - do vec::flat_map(in_items) {|in_item| + do vec::flat_map(in_items) |in_item| { alt in_item.node { ast::item_ty(ty, tps, _) { vec::append(~[filter_attrs(in_item)], @@ -151,7 +151,7 @@ impl helpers for ext_ctxt { fn ty_fn(span: span, -input_tys: ~[@ast::ty], -output: @ast::ty) -> @ast::ty { - let args = do vec::map(input_tys) {|ty| + let args = do vec::map(input_tys) |ty| { {mode: ast::expl(ast::by_ref), ty: ty, ident: @"", @@ -237,12 +237,12 @@ impl helpers for ext_ctxt { fn lambda(blk: ast::blk) -> @ast::expr { let ext_cx = self; let blk_e = self.expr(blk.span, ast::expr_block(blk)); - #ast{ {|| $(blk_e) } } + #ast{ || $(blk_e) } } fn clone_folder() -> fold::ast_fold { fold::make_fold(@{ - new_id: {|_id| self.next_id()} + new_id: |_id| self.next_id() with *fold::default_ast_fold() }) } @@ -272,7 +272,7 @@ impl helpers for ext_ctxt { } let fld = fold::make_fold(@{ - new_span: {|a|repl_sp(a, ast_util::dummy_sp(), span)} + new_span: |a| repl_sp(a, ast_util::dummy_sp(), span) with *fold::default_ast_fold() }); @@ -294,11 +294,11 @@ fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path, ast::expr_path( cx.helper_path(path, "serialize"))); - let ty_args = do vec::map(path.types) {|ty| + let ty_args = do vec::map(path.types) |ty| { let sv_stmts = ser_ty(cx, tps, ty, cx.clone(s), #ast{ __v }); let sv = cx.expr(path.span, ast::expr_block(cx.blk(path.span, sv_stmts))); - cx.at(ty.span, #ast{ {|__v| $(sv)} }) + cx.at(ty.span, #ast{ |__v| $(sv) }) }; ~[cx.stmt( @@ -316,14 +316,14 @@ fn ser_variant(cx: ext_ctxt, bodyfn: fn(-@ast::expr, ast::blk) -> @ast::expr, argfn: fn(-@ast::expr, uint, ast::blk) -> @ast::expr) -> ast::arm { - let vnames = do vec::from_fn(vec::len(tys)) {|i| + let vnames = do vec::from_fn(vec::len(tys)) |i| { @#fmt["__v%u", i] }; - let pats = do vec::from_fn(vec::len(tys)) {|i| + let pats = do vec::from_fn(vec::len(tys)) |i| { cx.binder_pat(tys[i].span, vnames[i]) }; let pat: @ast::pat = @{id: cx.next_id(), node: pfn(pats), span: span}; - let stmts = do vec::from_fn(vec::len(tys)) {|i| + let stmts = do vec::from_fn(vec::len(tys)) |i| { let v = cx.var_ref(span, vnames[i]); let arg_blk = cx.blk( @@ -376,7 +376,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, } ast::ty_rec(flds) { - let fld_stmts = do vec::from_fn(vec::len(flds)) {|fidx| + let fld_stmts = do vec::from_fn(vec::len(flds)) |fidx| { let fld = flds[fidx]; let vf = cx.expr(fld.span, ast::expr_field(cx.clone(v), @@ -412,17 +412,17 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, cx, tps, tys, ty.span, s, // Generate pattern (v1, v2, v3) - {|pats| ast::pat_tup(pats)}, + |pats| ast::pat_tup(pats), // Generate body s.emit_tup(3, {|| blk }) - {|-s, blk| + |-s, blk| { let sz = cx.lit_uint(ty.span, vec::len(tys)); let body = cx.lambda(blk); #ast{ $(s).emit_tup($(sz), $(body)) } }, // Generate s.emit_tup_elt(i, {|| blk }) - {|-s, i, blk| + |-s, i, blk| { let idx = cx.lit_uint(ty.span, i); let body = cx.lambda(blk); #ast{ $(s).emit_tup_elt($(idx), $(body)) } @@ -473,7 +473,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, cx.at(ty.span, #ast{ __e }))))); ~[#ast(stmt){ - std::serialization::emit_from_vec($(s), $(v), {|__e| $(ser_e) }) + std::serialization::emit_from_vec($(s), $(v), |__e| $(ser_e)) }] } @@ -491,17 +491,17 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: 
ast::ident, -> @ast::item { let ext_cx = cx; // required for #ast - let tp_types = vec::map(tps, {|tp| cx.ty_path(span, ~[tp.ident], ~[])}); + let tp_types = vec::map(tps, |tp| cx.ty_path(span, ~[tp.ident], ~[])); let v_ty = cx.ty_path(span, ~[name], tp_types); let tp_inputs = - vec::map(tps, {|tp| + vec::map(tps, |tp| {mode: ast::expl(ast::by_ref), ty: cx.ty_fn(span, ~[cx.ty_path(span, ~[tp.ident], ~[])], cx.ty_nil(span)), ident: @("__s" + *tp.ident), - id: cx.next_id()}}); + id: cx.next_id()}); #debug["tp_inputs = %?", tp_inputs]; @@ -518,7 +518,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tp_inputs); let tps_map = map::str_hash(); - do vec::iter2(tps, tp_inputs) {|tp, arg| + do vec::iter2(tps, tp_inputs) |tp, arg| { let arg_ident = arg.ident; tps_map.insert( *tp.ident, @@ -539,7 +539,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, vec::append(~[{ident: @"__S", id: cx.next_id(), bounds: ser_bnds}], - vec::map(tps, {|tp| cx.clone_ty_param(tp) })); + vec::map(tps, |tp| cx.clone_ty_param(tp))); let ser_output: @ast::ty = @{id: cx.next_id(), node: ast::ty_nil, @@ -575,7 +575,7 @@ fn deser_path(cx: ext_ctxt, tps: deser_tps_map, path: @ast::path, ast::expr_path( cx.helper_path(path, "deserialize"))); - let ty_args = do vec::map(path.types) {|ty| + let ty_args = do vec::map(path.types) |ty| { let dv_expr = deser_ty(cx, tps, ty, cx.clone(d)); cx.lambda(cx.expr_blk(dv_expr)) }; @@ -618,7 +618,7 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map, } ast::ty_rec(flds) { - let fields = do vec::from_fn(vec::len(flds)) {|fidx| + let fields = do vec::from_fn(vec::len(flds)) |fidx| { let fld = flds[fidx]; let d = cx.clone(d); let f = cx.lit_str(fld.span, fld.node.ident); @@ -647,7 +647,7 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map, // d.read_tup_elt(2u, {||...})) // } - let arg_exprs = do vec::from_fn(vec::len(tys)) {|i| + let arg_exprs = do vec::from_fn(vec::len(tys)) |i| { let idx = cx.lit_uint(ty.span, i); let body = deser_lambda(cx, tps, tys[i], cx.clone(d)); #ast{ $(d).read_tup_elt($(idx), $(body)) } @@ -703,17 +703,17 @@ fn mk_deser_fn(cx: ext_ctxt, span: span, -> @ast::item { let ext_cx = cx; // required for #ast - let tp_types = vec::map(tps, {|tp| cx.ty_path(span, ~[tp.ident], ~[])}); + let tp_types = vec::map(tps, |tp| cx.ty_path(span, ~[tp.ident], ~[])); let v_ty = cx.ty_path(span, ~[name], tp_types); let tp_inputs = - vec::map(tps, {|tp| + vec::map(tps, |tp| {mode: ast::expl(ast::by_ref), ty: cx.ty_fn(span, ~[], cx.ty_path(span, ~[tp.ident], ~[])), ident: @("__d" + *tp.ident), - id: cx.next_id()}}); + id: cx.next_id()}); #debug["tp_inputs = %?", tp_inputs]; @@ -725,7 +725,7 @@ fn mk_deser_fn(cx: ext_ctxt, span: span, tp_inputs); let tps_map = map::str_hash(); - do vec::iter2(tps, tp_inputs) {|tp, arg| + do vec::iter2(tps, tp_inputs) |tp, arg| { let arg_ident = arg.ident; tps_map.insert( *tp.ident, @@ -745,7 +745,7 @@ fn mk_deser_fn(cx: ext_ctxt, span: span, vec::append(~[{ident: @"__D", id: cx.next_id(), bounds: deser_bnds}], - vec::map(tps, {|tp| + vec::map(tps, |tp| { let cloned = cx.clone_ty_param(tp); {bounds: @(vec::append(*cloned.bounds, ~[ast::bound_copy])) @@ -774,8 +774,8 @@ fn ty_fns(cx: ext_ctxt, name: ast::ident, let span = ty.span; ~[ - mk_ser_fn(cx, span, name, tps, {|a,b,c,d|ser_ty(a, b, ty, c, d)}), - mk_deser_fn(cx, span, name, tps, {|a,b,c|deser_ty(a, b, ty, c)}) + mk_ser_fn(cx, span, name, tps, |a,b,c,d| ser_ty(a, b, ty, c, d)), + mk_deser_fn(cx, span, name, tps, |a,b,c| deser_ty(a, b, ty, c)) ] } @@ -783,17 +783,17 @@ fn ser_enum(cx: 
ext_ctxt, tps: ser_tps_map, e_name: ast::ident, e_span: span, variants: ~[ast::variant], -s: @ast::expr, -v: @ast::expr) -> ~[@ast::stmt] { let ext_cx = cx; - let arms = do vec::from_fn(vec::len(variants)) {|vidx| + let arms = do vec::from_fn(vec::len(variants)) |vidx| { let variant = variants[vidx]; let v_span = variant.span; let v_name = variant.node.name; - let variant_tys = vec::map(variant.node.args, {|a| a.ty }); + let variant_tys = vec::map(variant.node.args, |a| a.ty); ser_variant( cx, tps, variant_tys, v_span, cx.clone(s), // Generate pattern var(v1, v2, v3) - {|pats| + |pats| { if vec::is_empty(pats) { ast::pat_ident(cx.path(v_span, ~[v_name]), none) } else { @@ -803,7 +803,7 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident, // Generate body s.emit_enum_variant("foo", 0u, // 3u, {|| blk }) - {|-s, blk| + |-s, blk| { let v_name = cx.lit_str(v_span, v_name); let v_id = cx.lit_uint(v_span, vidx); let sz = cx.lit_uint(v_span, vec::len(variant_tys)); @@ -815,7 +815,7 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident, }, // Generate s.emit_enum_variant_arg(i, {|| blk }) - {|-s, i, blk| + |-s, i, blk| { let idx = cx.lit_uint(v_span, i); let body = cx.lambda(blk); #ast[expr]{ @@ -832,13 +832,13 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident, e_span: span, variants: ~[ast::variant], -d: @ast::expr) -> @ast::expr { let ext_cx = cx; - let arms: ~[ast::arm] = do vec::from_fn(vec::len(variants)) {|vidx| + let arms: ~[ast::arm] = do vec::from_fn(vec::len(variants)) |vidx| { let variant = variants[vidx]; let v_span = variant.span; let v_name = variant.node.name; - let tys = vec::map(variant.node.args, {|a| a.ty }); + let tys = vec::map(variant.node.args, |a| a.ty); - let arg_exprs = do vec::from_fn(vec::len(tys)) {|i| + let arg_exprs = do vec::from_fn(vec::len(tys)) |i| { let idx = cx.lit_uint(v_span, i); let body = deser_lambda(cx, tps, tys[i], cx.clone(d)); #ast{ $(d).read_enum_variant_arg($(idx), $(body)) } @@ -866,7 +866,7 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident, let e_name = cx.lit_str(e_span, e_name); let alt_expr = cx.expr(e_span, ast::expr_alt(#ast{__i}, arms, ast::alt_check)); - let var_lambda = #ast{ {|__i| $(alt_expr)} }; + let var_lambda = #ast{ |__i| $(alt_expr) }; let read_var = #ast{ $(cx.clone(d)).read_enum_variant($(var_lambda)) }; let read_lambda = cx.lambda(cx.expr_blk(read_var)); #ast{ $(d).read_enum($(e_name), $(read_lambda)) } @@ -877,8 +877,8 @@ fn enum_fns(cx: ext_ctxt, e_name: ast::ident, e_span: span, -> ~[@ast::item] { ~[ mk_ser_fn(cx, e_span, e_name, tps, - {|a,b,c,d|ser_enum(a, b, e_name, e_span, variants, c, d)}), + |a,b,c,d| ser_enum(a, b, e_name, e_span, variants, c, d)), mk_deser_fn(cx, e_span, e_name, tps, - {|a,b,c|deser_enum(a, b, e_name, e_span, variants, c)}) + |a,b,c| deser_enum(a, b, e_name, e_span, variants, c)) ] } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 1621cf7db7c..872d1f5eff6 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -81,7 +81,7 @@ fn mk_rec_e(cx: ext_ctxt, sp: span, fields: ~[{ident: ast::ident, ex: @ast::expr}]) -> @ast::expr { let mut astfields: ~[ast::field] = ~[]; - for fields.each {|field| + for fields.each |field| { let ident = field.ident; let val = field.ex; let astfield = diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index e324994eeb4..a678304725d 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -4,7 +4,7 @@ 
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { let args = get_mac_args_no_max(cx,sp,arg,1u,"concat_idents"); let mut res = ""; - for args.each {|e| + for args.each |e| { res += *expr_to_ident(cx, e, "expected an ident"); } diff --git a/src/libsyntax/ext/earley_parser.rs b/src/libsyntax/ext/earley_parser.rs index 8f76e8308b9..a6e47e0941c 100644 --- a/src/libsyntax/ext/earley_parser.rs +++ b/src/libsyntax/ext/earley_parser.rs @@ -47,7 +47,7 @@ fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos { } fn count_names(ms: &[matcher]) -> uint { - vec::foldl(0u, ms, {|ct, m| + vec::foldl(0u, ms, |ct, m| { ct + alt m.node { mtc_tok(_) { 0u } mtc_rep(more_ms, _, _) { count_names(more_ms) } @@ -57,7 +57,7 @@ fn count_names(ms: &[matcher]) -> uint { fn new_matcher_pos(ms: ~[matcher], sep: option<token>) -> matcher_pos { ~{elts: ms, sep: sep, mut idx: 0u, mut up: matcher_pos_up(none), - matches: copy vec::from_fn(count_names(ms), {|_i| dvec::dvec()}) } + matches: copy vec::from_fn(count_names(ms), |_i| dvec::dvec()) } } /* logically, an arb_depth should contain only one kind of nonterminal */ @@ -106,7 +106,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) // I bet this is a perf problem: we're preemptively // doing a lot of array work that will get thrown away // most of the time. - for ei.matches.eachi() { |idx, elt| + for ei.matches.eachi() |idx, elt| { new_pos.matches[idx].push(@seq(elt.get())); } @@ -145,7 +145,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) } let matches = vec::map(ei.matches, // fresh, same size: - {|_m| dvec::<@arb_depth>()}); + |_m| dvec::<@arb_depth>()); let ei_t <- ei; vec::push(cur_eis, ~{ elts: matchers, sep: sep, mut idx: 0u, @@ -165,7 +165,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) /* error messages here could be improved with links to orig. rules */ if tok == EOF { if eof_eis.len() == 1u { - let ret_val = vec::map(eof_eis[0u].matches, {|dv| dv.pop()}); + let ret_val = vec::map(eof_eis[0u].matches, |dv| dv.pop()); ret ret_val; /* success */ } else if eof_eis.len() > 1u { rdr.fatal("Ambiguity: multiple successful parses"); @@ -175,7 +175,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) } else { if (bb_eis.len() > 0u && next_eis.len() > 0u) || bb_eis.len() > 1u { - let nts = str::connect(vec::map(bb_eis, {|ei| + let nts = str::connect(vec::map(bb_eis, |ei| { alt ei.elts[ei.idx].node { mtc_bb(_,name,_) { *name } _ { fail; } } }), " or "); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 1b7abc21394..a037d87166a 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -100,8 +100,8 @@ fn expand_mod_items(exts: hashmap<str, syntax_extension>, cx: ext_ctxt, // For each item, look through the attributes. If any of them are // decorated with "item decorators", then use that function to transform // the item into a new set of items. 
- let new_items = do vec::flat_map(module.items) {|item| - do vec::foldr(item.attrs, ~[item]) {|attr, items| + let new_items = do vec::flat_map(module.items) |item| { + do vec::foldr(item.attrs, ~[item]) |attr, items| { let mname = alt attr.node.value.node { ast::meta_word(n) { n } ast::meta_name_value(n, _) { n } @@ -164,10 +164,10 @@ fn expand_crate(parse_sess: parse::parse_sess, let afp = default_ast_fold(); let cx: ext_ctxt = mk_ctxt(parse_sess, cfg); let f_pre = - @{fold_expr: {|a,b,c|expand_expr(exts, cx, a, b, c, afp.fold_expr)}, - fold_mod: {|a,b|expand_mod_items(exts, cx, a, b, afp.fold_mod)}, - fold_item: {|a,b|expand_item(cx, a, b, afp.fold_item)}, - new_span: {|a|new_span(cx, a)} + @{fold_expr: |a,b,c| expand_expr(exts, cx, a, b, c, afp.fold_expr), + fold_mod: |a,b| expand_mod_items(exts, cx, a, b, afp.fold_mod), + fold_item: |a,b| expand_item(cx, a, b, afp.fold_item), + new_span: |a|new_span(cx, a) with *afp}; let f = make_fold(f_pre); let cm = parse_expr_from_source_str("<core-macros>", diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index 197de757d3d..acf055ccabd 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -51,7 +51,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr { fn make_flags(cx: ext_ctxt, sp: span, flags: ~[flag]) -> @ast::expr { let mut tmp_expr = make_rt_path_expr(cx, sp, @"flag_none"); - for flags.each {|f| + for flags.each |f| { let fstr = alt f { flag_left_justify { "flag_left_justify" } flag_left_zero_pad { "flag_left_zero_pad" } @@ -136,7 +136,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, option::none { } _ { cx.span_unimpl(sp, unsupported); } } - for cnv.flags.each {|f| + for cnv.flags.each |f| { alt f { flag_left_justify { } flag_sign_always { @@ -191,7 +191,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, some(p) { log(debug, "param: " + int::to_str(p, 10u)); } _ { #debug("param: none"); } } - for c.flags.each {|f| + for c.flags.each |f| { alt f { flag_left_justify { #debug("flag: left justify"); } flag_left_zero_pad { #debug("flag: left zero pad"); } @@ -246,7 +246,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, let mut n = 0u; let mut piece_exprs = ~[]; let nargs = args.len(); - for pieces.each {|pc| + for pieces.each |pc| { alt pc { piece_string(s) { vec::push(piece_exprs, mk_str(cx, fmt_sp, s)); diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index cf80f7d11bd..d237cd33839 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -7,7 +7,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, cx.print_backtrace(); io::stdout().write_line( str::connect(vec::map(args, - {|&&ex| print::pprust::expr_to_str(ex)}), ", ") + |&&ex| print::pprust::expr_to_str(ex)), ", ") ); //trivial expression diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index 367a0dcea50..2bb8c27828c 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -97,17 +97,15 @@ impl of qq_helper for @ast::pat { fn gather_anti_quotes<N: qq_helper>(lo: uint, node: N) -> aq_ctxt { - let v = @{visit_expr: {|node, &&cx, v| - visit_aq(node, "from_expr", cx, v)}, - visit_ty: {|node, &&cx, v| - visit_aq(node, "from_ty", cx, v)} + let v = @{visit_expr: |node, &&cx, v| visit_aq(node, "from_expr", cx, v), + visit_ty: |node, &&cx, v| visit_aq(node, "from_ty", cx, v) with *default_visitor()}; let cx = @{lo:lo, gather: dvec()}; node.visit(cx, mk_vt(v)); // FIXME (#2250): Maybe 
this is an overkill (merge_sort), it might // be better to just keep the gather array in sorted order. - do cx.gather.swap { |v| - vec::to_mut(std::sort::merge_sort({|a,b| a.lo < b.lo}, v)) + do cx.gather.swap |v| { + vec::to_mut(std::sort::merge_sort(|a,b| a.lo < b.lo, v)) }; ret cx; } @@ -132,7 +130,7 @@ fn expand_ast(ecx: ext_ctxt, _sp: span, -> @ast::expr { let mut what = "expr"; - do option::iter(arg) {|arg| + do option::iter(arg) |arg| { let args: ~[@ast::expr] = alt arg.node { ast::expr_vec(elts, _) { elts } @@ -193,7 +191,7 @@ fn finish<T: qq_helper> let qcx = gather_anti_quotes(sp.lo, node); let cx = qcx; - for uint::range(1u, cx.gather.len()) {|i| + for uint::range(1u, cx.gather.len()) |i| { assert cx.gather[i-1u].lo < cx.gather[i].lo; // ^^ check that the vector is sorted assert cx.gather[i-1u].hi <= cx.gather[i].lo; @@ -205,7 +203,7 @@ fn finish<T: qq_helper> let mut state = active; let mut i = 0u, j = 0u; let g_len = cx.gather.len(); - do str::chars_iter(*str) {|ch| + do str::chars_iter(*str) |ch| { if (j < g_len && i == cx.gather[j].lo) { assert ch == '$'; let repl = #fmt("$%u ", j); @@ -229,14 +227,11 @@ fn finish<T: qq_helper> let cx = ecx; - let cfg_call = {|| - mk_call_(cx, sp, mk_access(cx, sp, ~[@"ext_cx"], @"cfg"), ~[]) - }; + let cfg_call = || mk_call_( + cx, sp, mk_access(cx, sp, ~[@"ext_cx"], @"cfg"), ~[]); - let parse_sess_call = {|| - mk_call_(cx, sp, - mk_access(cx, sp, ~[@"ext_cx"], @"parse_sess"), ~[]) - }; + let parse_sess_call = || mk_call_( + cx, sp, mk_access(cx, sp, ~[@"ext_cx"], @"parse_sess"), ~[]); let pcall = mk_call(cx,sp, ~[@"syntax", @"parse", @"parser", @@ -259,7 +254,7 @@ fn finish<T: qq_helper> rcall = mk_call(cx,sp, ~[@"syntax", @"ext", @"qquote", @"replace"], ~[pcall, - mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec({|g| + mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec(|g| { mk_call(cx,sp, ~[@"syntax", @"ext", @"qquote", @g.constr], @@ -275,10 +270,10 @@ fn replace<T>(node: T, repls: ~[fragment], ff: fn (ast_fold, T) -> T) -> T { let aft = default_ast_fold(); - let f_pre = @{fold_expr: {|a,b,c|replace_expr(repls, a, b, c, - aft.fold_expr)}, - fold_ty: {|a,b,c|replace_ty(repls, a, b, c, - aft.fold_ty)} + let f_pre = @{fold_expr: |a,b,c|replace_expr(repls, a, b, c, + aft.fold_expr), + fold_ty: |a,b,c|replace_ty(repls, a, b, c, + aft.fold_ty) with *aft}; ret ff(make_fold(f_pre), node); } diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index cc3a4d04d06..2f811891711 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -74,7 +74,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) -> {pre: ~[@expr], rep: option<@expr>, post: ~[@expr]} { let mut idx: uint = 0u; let mut res = none; - for elts.each {|elt| + for elts.each |elt| { alt elt.node { expr_mac(m) { alt m.node { @@ -103,7 +103,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) -> fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: ~[T]) -> option<~[U]> { let mut res = ~[]; - for v.each {|elem| + for v.each |elem| { alt f(elem) { none { ret none; } some(fv) { vec::push(res, fv); } } } ret some(res); @@ -113,7 +113,7 @@ fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result { alt ad { leaf(x) { ret f(x); } seq(ads, span) { - alt option_flatten_map({|x| a_d_map(x, f)}, *ads) { + alt option_flatten_map(|x| a_d_map(x, f), *ads) { none { ret none; } some(ts) { ret some(seq(@ts, span)); } } @@ -128,7 +128,7 @@ fn compose_sels(s1: selector, s2: selector) -> selector { some(matches) { a_d_map(matches, s2) } } } - ret {|x|scomp(s1, s2, 
x)}; + ret { |x| scomp(s1, s2, x) }; } @@ -164,11 +164,11 @@ selectors. */ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> { let res = box_str_hash::<arb_depth<matchable>>(); //need to do this first, to check vec lengths. - for b.literal_ast_matchers.each {|sel| + for b.literal_ast_matchers.each |sel| { alt sel(match_expr(e)) { none { ret none; } _ { } } } let mut never_mind: bool = false; - for b.real_binders.each {|key, val| + for b.real_binders.each |key, val| { alt val(match_expr(e)) { none { never_mind = true; } some(mtc) { res.insert(key, mtc); } @@ -190,22 +190,22 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr { } let afp = default_ast_fold(); let f_pre = - @{fold_ident: {|x,y|transcribe_ident(cx, b, idx_path, x, y)}, - fold_path: {|x,y|transcribe_path(cx, b, idx_path, x, y)}, - fold_expr: {|x,y,z| + @{fold_ident: |x,y|transcribe_ident(cx, b, idx_path, x, y), + fold_path: |x,y|transcribe_path(cx, b, idx_path, x, y), + fold_expr: |x,y,z| transcribe_expr(cx, b, idx_path, x, y, z, afp.fold_expr) - }, - fold_ty: {|x,y,z| + , + fold_ty: |x,y,z| transcribe_type(cx, b, idx_path, x, y, z, afp.fold_ty) - }, - fold_block: {|x,y,z| + , + fold_block: |x,y,z| transcribe_block(cx, b, idx_path, x, y, z, afp.fold_block) - }, - map_exprs: {|x,y| + , + map_exprs: |x,y| transcribe_exprs(cx, b, idx_path, x, y) - }, - new_id: {|x|new_id(x, cx)} + , + new_id: |x|new_id(x, cx) with *afp}; let f = make_fold(f_pre); let result = f.fold_expr(body); @@ -217,7 +217,7 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr { fn follow(m: arb_depth<matchable>, idx_path: @mut ~[uint]) -> arb_depth<matchable> { let mut res: arb_depth<matchable> = m; - for vec::each(*idx_path) {|idx| + for vec::each(*idx_path) |idx| { res = alt res { leaf(_) { ret res;/* end of the line */ } seq(new_ms, _) { new_ms[idx] } @@ -255,11 +255,11 @@ fn free_vars(b: bindings, e: @expr, it: fn(ident)) { // using fold is a hack: we want visit, but it doesn't hit idents ) : // solve this with macros let f_pre = - @{fold_ident: {|x,y|mark_ident(x, y, b, idents)} + @{fold_ident: |x,y|mark_ident(x, y, b, idents) with *default_ast_fold()}; let f = make_fold(f_pre); f.fold_expr(e); // ignore result - for idents.each_key {|x| it(x); }; + for idents.each_key |x| { it(x); }; } @@ -276,7 +276,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], let mut repeat: option<{rep_count: uint, name: ident}> = none; /* we need to walk over all the free vars in lockstep, except for the leaves, which are just duplicated */ - do free_vars(b, repeat_me) {|fv| + do free_vars(b, repeat_me) |fv| { let cur_pos = follow(b.get(fv), idx_path); alt cur_pos { leaf(_) { } @@ -481,7 +481,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) { _ { cx.bug("broken traversal in p_t_s_r") } } } - b.literal_ast_matchers.push({|x|select(cx, x, e)}); + b.literal_ast_matchers.push(|x| select(cx, x, e)); } } } @@ -523,7 +523,7 @@ fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) { if b.real_binders.contains_key(p_id) { cx.span_fatal(p.span, "duplicate binding identifier"); } - b.real_binders.insert(p_id, compose_sels(s, {|x|select(cx, x)})); + b.real_binders.insert(p_id, compose_sels(s, |x| select(cx, x))); } none { } } @@ -568,7 +568,7 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) { _ { none } } } - let final_step = {|x|select_pt_1(cx, x, select_pt_2)}; + let final_step = |x| select_pt_1(cx, x, select_pt_2); b.real_binders.insert(id, compose_sels(s, 
final_step)); } none { no_des(cx, pth.span, "under `#<>`"); } @@ -588,7 +588,7 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) { _ { none } } } - let final_step = {|x|select_pt_1(cx, x, select_pt_2)}; + let final_step = |x| select_pt_1(cx, x, select_pt_2); b.real_binders.insert(id, compose_sels(s, final_step)); } none { no_des(cx, blk.span, "under `#{}`"); } @@ -625,7 +625,7 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector, } } p_t_s_rec(cx, match_expr(repeat_me), - compose_sels(s, {|x|select(cx, repeat_me, offset, x)}), b); + compose_sels(s, |x| select(cx, repeat_me, offset, x)), b); } @@ -649,7 +649,7 @@ fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector, } } b.literal_ast_matchers.push( - compose_sels(s, {|x|len_select(cx, x, at_least, len)})); + compose_sels(s, |x| len_select(cx, x, at_least, len))); } fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: ~[@expr], _repeat_after: bool, @@ -670,7 +670,7 @@ fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: ~[@expr], _repeat_after: bool, } } p_t_s_rec(cx, match_expr(elts[idx]), - compose_sels(s, {|x, copy idx|select(cx, x, idx)}), b); + compose_sels(s, |x, copy idx| select(cx, x, idx)), b); idx += 1u; } } @@ -681,7 +681,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, let mut macro_name: option<@str> = none; let mut clauses: ~[@clause] = ~[]; - for args.each {|arg| + for args.each |arg| { alt arg.node { expr_vec(elts, mutbl) { if vec::len(elts) != 2u { @@ -745,9 +745,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, } } - let ext = {|a,b,c,d, move clauses| - generic_extension(a,b,c,d,clauses) - }; + let ext = |a,b,c,d, move clauses| generic_extension(a,b,c,d,clauses); ret {ident: alt macro_name { @@ -766,7 +764,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, some(arg) { arg } none { cx.span_fatal(sp, "macro must have arguments")} }; - for clauses.each {|c| + for clauses.each |c| { alt use_selectors_to_bind(c.params, arg) { some(bindings) { ret transcribe(cx, bindings, c.body); } none { cont; } diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 51feb53b2b3..ee5e96cc0e4 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -50,7 +50,7 @@ fn expand_mod(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { get_mac_args(cx, sp, arg, 0u, option::some(0u), "file"); ret mk_lit(cx, sp, ast::lit_str( - @str::connect(cx.mod_path().map({|x|*x}), "::"))); + @str::connect(cx.mod_path().map(|x|*x), "::"))); } fn expand_include(cx: ext_ctxt, sp: span, arg: ast::mac_arg, @@ -88,7 +88,7 @@ fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, alt io::read_whole_file(res_rel_file(cx, sp, file)) { result::ok(src) { - let u8_exprs = vec::map(src, { |char: u8| + let u8_exprs = vec::map(src, |char: u8| { mk_lit(cx, sp, ast::lit_uint(char as u64, ast::ty_u8)) }); ret mk_uniq_vec_e(cx, sp, u8_exprs); |
