| | | |
|---|---|---|
| author | Brian Anderson <banderson@mozilla.com> | 2012-08-01 17:30:05 -0700 |
| committer | Brian Anderson <banderson@mozilla.com> | 2012-08-01 19:16:06 -0700 |
| commit | b355936b4da0831f47afe8f251daee503c8caa32 (patch) | |
| tree | 9f870e26f773af714cbcf7f315de5ff3722300c3 | /src/libsyntax/ext |
| parent | dc499f193e473abc78c557feaa86969bbe7aa159 (diff) | |
| download | rust-b355936b4da0831f47afe8f251daee503c8caa32.tar.gz, rust-b355936b4da0831f47afe8f251daee503c8caa32.zip | |
Convert ret to return
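The commit is a mechanical keyword rename: every `ret` in the syntax-extension code becomes `return`, with no change in behavior. As a minimal sketch of the resulting style (illustrative only — the function below is hypothetical, and the 2012 dialect shown in the diff, with `alt`, `~str`, and `@`-boxes, no longer compiles), written in modern Rust:

```rust
// Hypothetical example, not code from this commit. Before the rename the
// old syntax was written as:
//     fn double(x: int) -> int { ret x * 2; }
// After this commit the keyword is `return`:
fn double(x: i64) -> i64 {
    return x * 2;
}

fn main() {
    assert_eq!(double(21), 42);
}
```

Explicit `return` is still valid in current Rust, though a trailing expression without `return` is the more idiomatic form today.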
Diffstat (limited to 'src/libsyntax/ext')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/ext/base.rs | 18 |
| -rw-r--r-- | src/libsyntax/ext/build.rs | 14 |
| -rw-r--r-- | src/libsyntax/ext/concat_idents.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/env.rs | 4 |
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 16 |
| -rw-r--r-- | src/libsyntax/ext/fmt.rs | 54 |
| -rw-r--r-- | src/libsyntax/ext/ident_to_str.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/log_syntax.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/pipes/parse_proto.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/pipes/proto.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/qquote.rs | 8 |
| -rw-r--r-- | src/libsyntax/ext/simplext.rs | 84 |
| -rw-r--r-- | src/libsyntax/ext/source_util.rs | 20 |
| -rw-r--r-- | src/libsyntax/ext/tt/earley_parser.rs | 12 |
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 7 |
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 14 |
16 files changed, 136 insertions, 125 deletions
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index e8505387fa8..2947201003f 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -107,7 +107,7 @@ fn syntax_expander_table() -> hashmap<~str, syntax_extension> { builtin(ext::source_util::expand_mod)); syntax_expanders.insert(~"proto", builtin_item_tt(ext::pipes::expand_proto)); - ret syntax_expanders; + return syntax_expanders; } @@ -148,7 +148,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess, fn backtrace() -> expn_info { self.backtrace } fn mod_push(i: ast::ident) { vec::push(self.mod_path, i); } fn mod_pop() { vec::pop(self.mod_path); } - fn mod_path() -> ~[ast::ident] { ret self.mod_path; } + fn mod_path() -> ~[ast::ident] { return self.mod_path; } fn bt_push(ei: codemap::expn_info_) { alt ei { expanded_from({call_site: cs, callie: callie}) { @@ -193,7 +193,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess, self.parse_sess.span_diagnostic.handler().bug(msg); } fn next_id() -> ast::node_id { - ret parse::next_node_id(self.parse_sess); + return parse::next_node_id(self.parse_sess); } } let imp : ctxt_repr = { @@ -202,14 +202,14 @@ fn mk_ctxt(parse_sess: parse::parse_sess, mut backtrace: none, mut mod_path: ~[] }; - ret imp as ext_ctxt + return imp as ext_ctxt } fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, error: ~str) -> ~str { alt expr.node { ast::expr_lit(l) { alt l.node { - ast::lit_str(s) { ret *s; } + ast::lit_str(s) { return *s; } _ { cx.span_fatal(l.span, error); } } } @@ -222,7 +222,7 @@ fn expr_to_ident(cx: ext_ctxt, expr: @ast::expr, error: ~str) -> ast::ident { ast::expr_path(p) { if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { cx.span_fatal(expr.span, error); - } else { ret p.idents[0]; } + } else { return p.idents[0]; } } _ { cx.span_fatal(expr.span, error); } } @@ -230,7 +230,7 @@ fn expr_to_ident(cx: ext_ctxt, expr: @ast::expr, error: ~str) -> ast::ident { fn get_mac_args_no_max(cx: ext_ctxt, sp: span, arg: ast::mac_arg, min: uint, name: ~str) -> ~[@ast::expr] { - ret get_mac_args(cx, sp, arg, min, none, name); + return get_mac_args(cx, sp, arg, min, none, name); } fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg, @@ -250,7 +250,7 @@ fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg, cx.span_fatal(sp, fmt!{"#%s needs at least %u arguments.", name, min}); } - _ { ret elts; /* we're good */} + _ { return elts; /* we're good */} } } _ { @@ -308,7 +308,7 @@ fn tt_args_to_original_flavor(cx: ext_ctxt, sp: span, arg: ~[ast::token_tree]) _ { fail ~"badly-structured parse result"; } }; - ret some(@{id: parse::next_node_id(cx.parse_sess()), + return some(@{id: parse::next_node_id(cx.parse_sess()), callee_id: parse::next_node_id(cx.parse_sess()), node: ast::expr_vec(args, ast::m_imm), span: sp}); } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 5eca1e8e17c..ab2d93faabe 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -3,7 +3,7 @@ import base::ext_ctxt; fn mk_expr(cx: ext_ctxt, sp: codemap::span, expr: ast::expr_) -> @ast::expr { - ret @{id: cx.next_id(), callee_id: cx.next_id(), + return @{id: cx.next_id(), callee_id: cx.next_id(), node: expr, span: sp}; } @@ -13,15 +13,15 @@ fn mk_lit(cx: ext_ctxt, sp: span, lit: ast::lit_) -> @ast::expr { } fn mk_int(cx: ext_ctxt, sp: span, i: int) -> @ast::expr { let lit = ast::lit_int(i as i64, ast::ty_i); - ret mk_lit(cx, sp, lit); + return mk_lit(cx, sp, lit); } fn mk_uint(cx: ext_ctxt, sp: span, u: uint) -> @ast::expr { let lit = ast::lit_uint(u as u64, ast::ty_u); - ret 
mk_lit(cx, sp, lit); + return mk_lit(cx, sp, lit); } fn mk_u8(cx: ext_ctxt, sp: span, u: u8) -> @ast::expr { let lit = ast::lit_uint(u as u64, ast::ty_u8); - ret mk_lit(cx, sp, lit); + return mk_lit(cx, sp, lit); } fn mk_binary(cx: ext_ctxt, sp: span, op: ast::binop, lhs: @ast::expr, rhs: @ast::expr) @@ -48,7 +48,7 @@ fn mk_access_(cx: ext_ctxt, sp: span, p: @ast::expr, m: ast::ident) fn mk_access(cx: ext_ctxt, sp: span, p: ~[ast::ident], m: ast::ident) -> @ast::expr { let pathexpr = mk_path(cx, sp, p); - ret mk_access_(cx, sp, pathexpr, m); + return mk_access_(cx, sp, pathexpr, m); } fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr, args: ~[@ast::expr]) -> @ast::expr { @@ -57,7 +57,7 @@ fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr, fn mk_call(cx: ext_ctxt, sp: span, fn_path: ~[ast::ident], args: ~[@ast::expr]) -> @ast::expr { let pathexpr = mk_path(cx, sp, fn_path); - ret mk_call_(cx, sp, pathexpr, args); + return mk_call_(cx, sp, pathexpr, args); } // e = expr, t = type fn mk_base_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) -> @@ -79,7 +79,7 @@ fn mk_fixed_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) -> } fn mk_base_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr { let lit = ast::lit_str(@s); - ret mk_lit(cx, sp, lit); + return mk_lit(cx, sp, lit); } fn mk_uniq_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr { mk_vstore_e(cx, sp, mk_base_str(cx, sp, s), ast::vstore_uniq) diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index b3545cc635d..f5d22e6754c 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -8,7 +8,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, res += *expr_to_ident(cx, e, ~"expected an ident"); } - ret @{id: cx.next_id(), + return @{id: cx.next_id(), callee_id: cx.next_id(), node: ast::expr_path(@{span: sp, global: false, idents: ~[@res], rp: none, types: ~[]}), diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index e03fc2ce47b..4aa55e88f16 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -17,8 +17,8 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, let var = expr_to_str(cx, args[0], ~"#env requires a string"); alt os::getenv(var) { - option::none { ret mk_uniq_str(cx, sp, ~""); } - option::some(s) { ret mk_uniq_str(cx, sp, s); } + option::none { return mk_uniq_str(cx, sp, ~""); } + option::some(s) { return mk_uniq_str(cx, sp, s); } } } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 62e5841a749..24cc78e366e 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -15,7 +15,7 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt, orig: fn@(expr_, span, ast_fold) -> (expr_, span)) -> (expr_, span) { - ret alt e { + return alt e { // expr_mac should really be expr_ext or something; it's the // entry-point for all syntax extensions. 
expr_mac(mac) { @@ -159,7 +159,7 @@ fn expand_mod_items(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt, } }; - ret {items: new_items with module_}; + return {items: new_items with module_}; } @@ -185,9 +185,9 @@ fn expand_item(exts: hashmap<~str, syntax_extension>, if is_mod { cx.mod_push(it.ident); } let ret_val = orig(it, fld); if is_mod { cx.mod_pop(); } - ret ret_val; + return ret_val; } - none { ret none; } + none { return none; } } } @@ -221,7 +221,7 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>, } }; cx.bt_pop(); - ret maybe_it + return maybe_it } _ { cx.span_fatal(it.span, fmt!{"%s is not a legal here", *extname}) } @@ -235,7 +235,7 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>, fn new_span(cx: ext_ctxt, sp: span) -> span { /* this discards information in the case of macro-defining macros */ - ret {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()}; + return {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()}; } // FIXME (#2247): this is a terrible kludge to inject some macros into @@ -244,7 +244,7 @@ fn new_span(cx: ext_ctxt, sp: span) -> span { // compiled part of libcore at very least. fn core_macros() -> ~str { - ret + return ~"{ #macro[[#error[f, ...], log(core::error, #fmt[f, ...])]]; #macro[[#warn[f, ...], log(core::warn, #fmt[f, ...])]]; @@ -275,7 +275,7 @@ fn expand_crate(parse_sess: parse::parse_sess, f.fold_expr(cm); let res = @f.fold_crate(*c); - ret res; + return res; } // Local Variables: // mode: rust diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index d8549ca21c8..10820664344 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -27,7 +27,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg, parse_fmt_err_(cx, fmtspan, s) }; let pieces = parse_fmt_string(fmt, parse_fmt_err); - ret pieces_to_expr(cx, sp, pieces, args); + return pieces_to_expr(cx, sp, pieces, args); } // FIXME (#2249): A lot of these functions for producing expressions can @@ -38,12 +38,12 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: ~[piece], args: ~[@ast::expr]) -> @ast::expr { fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> ~[ast::ident] { - ret ~[@~"extfmt", @~"rt", ident]; + return ~[@~"extfmt", @~"rt", ident]; } fn make_rt_path_expr(cx: ext_ctxt, sp: span, ident: ast::ident) -> @ast::expr { let path = make_path_vec(cx, ident); - ret mk_path(cx, sp, path); + return mk_path(cx, sp, path); } // Produces an AST expression that represents a RT::conv record, // which tells the RT::conv* functions how to perform the conversion @@ -62,18 +62,18 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, tmp_expr = mk_binary(cx, sp, ast::bitor, tmp_expr, make_rt_path_expr(cx, sp, @fstr)); } - ret tmp_expr; + return tmp_expr; } fn make_count(cx: ext_ctxt, sp: span, cnt: count) -> @ast::expr { alt cnt { count_implied { - ret make_rt_path_expr(cx, sp, @~"count_implied"); + return make_rt_path_expr(cx, sp, @~"count_implied"); } count_is(c) { let count_lit = mk_int(cx, sp, c); let count_is_path = make_path_vec(cx, @~"count_is"); let count_is_args = ~[count_lit]; - ret mk_call(cx, sp, count_is_path, count_is_args); + return mk_call(cx, sp, count_is_path, count_is_args); } _ { cx.span_unimpl(sp, ~"unimplemented #fmt conversion"); } } @@ -91,12 +91,12 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, ty_octal { rt_type = ~"ty_octal"; } _ { rt_type = ~"ty_default"; } } - ret make_rt_path_expr(cx, sp, @rt_type); + return make_rt_path_expr(cx, sp, @rt_type); } fn make_conv_rec(cx: ext_ctxt, sp: span, flags_expr: @ast::expr, width_expr: 
@ast::expr, precision_expr: @ast::expr, ty_expr: @ast::expr) -> @ast::expr { - ret mk_rec_e(cx, sp, + return mk_rec_e(cx, sp, ~[{ident: @~"flags", ex: flags_expr}, {ident: @~"width", ex: width_expr}, {ident: @~"precision", ex: precision_expr}, @@ -106,7 +106,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, let rt_conv_width = make_count(cx, sp, cnv.width); let rt_conv_precision = make_count(cx, sp, cnv.precision); let rt_conv_ty = make_ty(cx, sp, cnv.ty); - ret make_conv_rec(cx, sp, rt_conv_flags, rt_conv_width, + return make_conv_rec(cx, sp, rt_conv_flags, rt_conv_width, rt_conv_precision, rt_conv_ty); } fn make_conv_call(cx: ext_ctxt, sp: span, conv_type: ~str, cnv: conv, @@ -115,7 +115,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, let path = make_path_vec(cx, @fname); let cnv_expr = make_rt_conv_expr(cx, sp, cnv); let args = ~[cnv_expr, arg]; - ret mk_call(cx, arg.span, path, args); + return mk_call(cx, arg.span, path, args); } fn make_new_conv(cx: ext_ctxt, sp: span, cnv: conv, arg: @ast::expr) -> @@ -125,10 +125,10 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, fn is_signed_type(cnv: conv) -> bool { alt cnv.ty { ty_int(s) { - alt s { signed { ret true; } unsigned { ret false; } } + alt s { signed { return true; } unsigned { return false; } } } - ty_float { ret true; } - _ { ret false; } + ty_float { return true; } + _ { return false; } } } let unsupported = ~"conversion not supported in #fmt string"; @@ -168,22 +168,28 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, _ { cx.span_unimpl(sp, unsupported); } } alt cnv.ty { - ty_str { ret make_conv_call(cx, arg.span, ~"str", cnv, arg); } + ty_str { return make_conv_call(cx, arg.span, ~"str", cnv, arg); } ty_int(sign) { alt sign { - signed { ret make_conv_call(cx, arg.span, ~"int", cnv, arg); } + signed { + return make_conv_call(cx, arg.span, ~"int", cnv, arg); + } unsigned { - ret make_conv_call(cx, arg.span, ~"uint", cnv, arg); + return make_conv_call(cx, arg.span, ~"uint", cnv, arg); } } } - ty_bool { ret make_conv_call(cx, arg.span, ~"bool", cnv, arg); } - ty_char { ret make_conv_call(cx, arg.span, ~"char", cnv, arg); } - ty_hex(_) { ret make_conv_call(cx, arg.span, ~"uint", cnv, arg); } - ty_bits { ret make_conv_call(cx, arg.span, ~"uint", cnv, arg); } - ty_octal { ret make_conv_call(cx, arg.span, ~"uint", cnv, arg); } - ty_float { ret make_conv_call(cx, arg.span, ~"float", cnv, arg); } - ty_poly { ret make_conv_call(cx, arg.span, ~"poly", cnv, arg); } + ty_bool { return make_conv_call(cx, arg.span, ~"bool", cnv, arg); } + ty_char { return make_conv_call(cx, arg.span, ~"char", cnv, arg); } + ty_hex(_) { + return make_conv_call(cx, arg.span, ~"uint", cnv, arg); + } + ty_bits { return make_conv_call(cx, arg.span, ~"uint", cnv, arg); } + ty_octal { return make_conv_call(cx, arg.span, ~"uint", cnv, arg); } + ty_float { + return make_conv_call(cx, arg.span, ~"float", cnv, arg); + } + ty_poly { return make_conv_call(cx, arg.span, ~"poly", cnv, arg); } } } fn log_conv(c: conv) { @@ -275,7 +281,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, } let arg_vec = mk_fixed_vec_e(cx, fmt_sp, piece_exprs); - ret mk_call(cx, fmt_sp, ~[@~"str", @~"concat"], ~[arg_vec]); + return mk_call(cx, fmt_sp, ~[@~"str", @~"concat"], ~[arg_vec]); } // // Local Variables: diff --git a/src/libsyntax/ext/ident_to_str.rs b/src/libsyntax/ext/ident_to_str.rs index 54f97912f3d..06faff9ee1b 100644 --- a/src/libsyntax/ext/ident_to_str.rs +++ b/src/libsyntax/ext/ident_to_str.rs @@ -6,6 +6,6 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, _body: 
ast::mac_body) -> @ast::expr { let args = get_mac_args(cx,sp,arg,1u,option::some(1u),~"ident_to_str"); - ret mk_uniq_str(cx, sp, *expr_to_ident(cx, args[0u], + return mk_uniq_str(cx, sp, *expr_to_ident(cx, args[0u], ~"expected an ident")); } diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index 70d83b164c8..f84e496be9c 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -11,6 +11,6 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, ); //trivial expression - ret @{id: cx.next_id(), callee_id: cx.next_id(), + return @{id: cx.next_id(), callee_id: cx.next_id(), node: ast::expr_rec(~[], option::none), span: sp}; } diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index 0375f742b4b..ea8c8c04b0e 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -19,7 +19,7 @@ impl proto_parser of proto_parser for parser { {sep: none, trailing_sep_allowed: false}, |self| self.parse_state(proto)); - ret proto; + return proto; } fn parse_state(proto: protocol) { diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index b9f77ea3fc2..1805fd9fa35 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -149,7 +149,7 @@ class protocol_ { fn has_ty_params() -> bool { for self.states.each |s| { if s.ty_params.len() > 0 { - ret true; + return true; } } false diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index 94753ea88e0..1c3e0aa5181 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -112,7 +112,7 @@ fn gather_anti_quotes<N: qq_helper>(lo: uint, node: N) -> aq_ctxt do cx.gather.swap |v| { vec::to_mut(std::sort::merge_sort(|a,b| a.lo < b.lo, v)) }; - ret cx; + return cx; } fn visit_aq<T:qq_helper>(node: T, constr: ~str, &&cx: aq_ctxt, v: vt<aq_ctxt>) @@ -155,7 +155,7 @@ fn expand_ast(ecx: ext_ctxt, _sp: span, } let body = get_mac_body(ecx,_sp,body); - ret alt what { + return alt what { ~"crate" {finish(ecx, body, parse_crate)} ~"expr" {finish(ecx, body, parse_expr)} ~"ty" {finish(ecx, body, parse_ty)} @@ -268,7 +268,7 @@ fn finish<T: qq_helper> ~[@~"syntax", @~"ext", @~"qquote", @node.get_fold_fn()])]); } - ret rcall; + return rcall; } fn replace<T>(node: T, repls: ~[fragment], ff: fn (ast_fold, T) -> T) @@ -280,7 +280,7 @@ fn replace<T>(node: T, repls: ~[fragment], ff: fn (ast_fold, T) -> T) fold_ty: |a,b,c|replace_ty(repls, a, b, c, aft.fold_ty) with *aft}; - ret ff(make_fold(f_pre), node); + return ff(make_fold(f_pre), node); } fn fold_crate(f: ast_fold, &&n: @ast::crate) -> @ast::crate { @f.fold_crate(*n) diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index b261e7657a2..b835300d5c7 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -13,9 +13,9 @@ export add_new_extension; fn path_to_ident(pth: @path) -> option<ident> { if vec::len(pth.idents) == 1u && vec::len(pth.types) == 0u { - ret some(pth.idents[0u]); + return some(pth.idents[0u]); } - ret none; + return none; } //a vec of binders might be a little big. 
@@ -94,7 +94,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) -> } idx += 1u; } - ret alt res { + return alt res { some(val) { val } none { {pre: elts, rep: none, post: ~[]} } } @@ -104,18 +104,18 @@ fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: ~[T]) -> option<~[U]> { let mut res = ~[]; for v.each |elem| { - alt f(elem) { none { ret none; } some(fv) { vec::push(res, fv); } } + alt f(elem) { none { return none; } some(fv) { vec::push(res, fv); } } } - ret some(res); + return some(res); } fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result { alt ad { - leaf(x) { ret f(x); } + leaf(x) { return f(x); } seq(ads, span) { alt option_flatten_map(|x| a_d_map(x, f), *ads) { - none { ret none; } - some(ts) { ret some(seq(@ts, span)); } + none { return none; } + some(ts) { return some(seq(@ts, span)); } } } } @@ -123,12 +123,12 @@ fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result { fn compose_sels(s1: selector, s2: selector) -> selector { fn scomp(s1: selector, s2: selector, m: matchable) -> match_result { - ret alt s1(m) { + return alt s1(m) { none { none } some(matches) { a_d_map(matches, s2) } } } - ret { |x| scomp(s1, s2, x) }; + return { |x| scomp(s1, s2, x) }; } @@ -150,9 +150,11 @@ fn pattern_to_selectors(cx: ext_ctxt, e: @expr) -> binders { literal_ast_matchers: dvec()}; //this oughta return binders instead, but macro args are a sequence of //expressions, rather than a single expression - fn trivial_selector(m: matchable) -> match_result { ret some(leaf(m)); } + fn trivial_selector(m: matchable) -> match_result { + return some(leaf(m)); + } p_t_s_rec(cx, match_expr(e), trivial_selector, res); - ret res; + return res; } @@ -165,7 +167,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> { let res = box_str_hash::<arb_depth<matchable>>(); //need to do this first, to check vec lengths. 
for b.literal_ast_matchers.each |sel| { - alt sel(match_expr(e)) { none { ret none; } _ { } } + alt sel(match_expr(e)) { none { return none; } _ { } } } let mut never_mind: bool = false; for b.real_binders.each |key, val| { @@ -175,18 +177,18 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> { } }; //HACK: `ret` doesn't work in `for each` - if never_mind { ret none; } - ret some(res); + if never_mind { return none; } + return some(res); } /* use the bindings on the body to generate the expanded code */ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr { let idx_path: @mut ~[uint] = @mut ~[]; - fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { ret cx.next_id(); } + fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { return cx.next_id(); } fn new_span(cx: ext_ctxt, sp: span) -> span { /* this discards information in the case of macro-defining macros */ - ret {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()}; + return {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()}; } let afp = default_ast_fold(); let f_pre = @@ -209,7 +211,7 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr { with *afp}; let f = make_fold(f_pre); let result = f.fold_expr(body); - ret result; + return result; } @@ -219,25 +221,25 @@ fn follow(m: arb_depth<matchable>, idx_path: @mut ~[uint]) -> let mut res: arb_depth<matchable> = m; for vec::each(*idx_path) |idx| { res = alt res { - leaf(_) { ret res;/* end of the line */ } + leaf(_) { return res;/* end of the line */ } seq(new_ms, _) { new_ms[idx] } } } - ret res; + return res; } fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>, idx_path: @mut ~[uint]) -> option<matchable> { alt mmaybe { - none { ret none } + none { return none } some(m) { - ret alt follow(m, idx_path) { + return alt follow(m, idx_path) { seq(_, sp) { cx.span_fatal(sp, ~"syntax matched under ... but not " + ~"used that way.") } - leaf(m) { ret some(m) } + leaf(m) { return some(m) } } } } @@ -250,7 +252,7 @@ fn free_vars(b: bindings, e: @expr, it: fn(ident)) { fn mark_ident(&&i: ident, _fld: ast_fold, b: bindings, idents: hashmap<ident, ()>) -> ident { if b.contains_key(i) { idents.insert(i, ()); } - ret i; + return i; } // using fold is a hack: we want visit, but it doesn't hit idents ) : // solve this with macros @@ -319,7 +321,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], } } res = vec::append(res, vec::map(post, recur)); - ret res; + return res; } } } @@ -329,7 +331,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], // substitute, in a position that's required to be an ident fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], &&i: ident, _fld: ast_fold) -> ident { - ret alt follow_for_trans(cx, b.find(i), idx_path) { + return alt follow_for_trans(cx, b.find(i), idx_path) { some(match_ident(a_id)) { a_id.node } some(m) { match_error(cx, m, ~"an identifier") } none { i } @@ -340,7 +342,7 @@ fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], p: path, _fld: ast_fold) -> path { // Don't substitute into qualified names. 
- if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { ret p; } + if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { return p; } alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) { some(match_ident(id)) { {span: id.span, global: false, idents: ~[id.node], @@ -358,7 +360,7 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span)) -> (ast::expr_, span) { - ret alt e { + return alt e { expr_path(p) { // Don't substitute into qualified names. if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { @@ -387,7 +389,7 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], orig: fn@(ast::ty_, span, ast_fold) -> (ast::ty_, span)) -> (ast::ty_, span) { - ret alt t { + return alt t { ast::ty_path(pth, _) { alt path_to_ident(pth) { some(id) { @@ -413,7 +415,7 @@ fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], orig: fn@(blk_, span, ast_fold) -> (blk_, span)) -> (blk_, span) { - ret alt block_to_ident(blk) { + return alt block_to_ident(blk) { some(id) { alt follow_for_trans(cx, b.find(id), idx_path) { some(match_block(new_blk)) { (new_blk.node, new_blk.span) } @@ -474,7 +476,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) { _ { fn select(cx: ext_ctxt, m: matchable, pat: @expr) -> match_result { - ret alt m { + return alt m { match_expr(e) { if e == pat { some(leaf(match_exact)) } else { none } } @@ -494,7 +496,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) { /* make a match more precise */ fn specialize_match(m: matchable) -> matchable { - ret alt m { + return alt m { match_expr(e) { alt e.node { expr_path(pth) { @@ -515,7 +517,7 @@ fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) { alt path_to_ident(p) { some(p_id) { fn select(cx: ext_ctxt, m: matchable) -> match_result { - ret alt m { + return alt m { match_expr(e) { some(leaf(specialize_match(m))) } _ { cx.bug(~"broken traversal in p_t_s_r") } } @@ -530,8 +532,8 @@ fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) { } fn block_to_ident(blk: blk_) -> option<ident> { - if vec::len(blk.stmts) != 0u { ret none; } - ret alt blk.expr { + if vec::len(blk.stmts) != 0u { return none; } + return alt blk.expr { some(expr) { alt expr.node { expr_path(pth) { path_to_ident(pth) } _ { none } } } @@ -542,7 +544,7 @@ fn block_to_ident(blk: blk_) -> option<ident> { fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, _s: selector, _b: binders) { fn select_pt_1(cx: ext_ctxt, m: matchable, fn_m: fn(ast::mac) -> match_result) -> match_result { - ret alt m { + return alt m { match_expr(e) { alt e.node { expr_mac(mac) { fn_m(mac) } _ { none } } } @@ -565,7 +567,7 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector, b: binders) { fn select(cx: ext_ctxt, repeat_me: @expr, offset: uint, m: matchable) -> match_result { - ret alt m { + return alt m { match_expr(e) { alt e.node { expr_vec(arg_elts, _) { @@ -595,7 +597,7 @@ fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector, b: binders) { fn len_select(_cx: ext_ctxt, m: matchable, at_least: bool, len: uint) -> match_result { - ret alt m { + return alt m { match_expr(e) { alt e.node { expr_vec(arg_elts, _) { @@ -619,7 +621,7 @@ fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: ~[@expr], _repeat_after: bool, let mut idx: uint = 0u; while idx < vec::len(elts) { fn select(cx: ext_ctxt, m: matchable, idx: uint) -> match_result { - ret alt m { + return alt m { match_expr(e) { alt e.node { 
expr_vec(arg_elts, _) { @@ -709,7 +711,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, let ext = |a,b,c,d, move clauses| generic_extension(a,b,c,d,clauses); - ret {ident: + return {ident: alt macro_name { some(id) { id } none { @@ -728,7 +730,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, }; for clauses.each |c| { alt use_selectors_to_bind(c.params, arg) { - some(bindings) { ret transcribe(cx, bindings, c.body); } + some(bindings) { return transcribe(cx, bindings, c.body); } none { again; } } } diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 226292086f8..00c1e4ff47a 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -18,7 +18,7 @@ fn expand_line(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"line"); let loc = codemap::lookup_char_pos(cx.codemap(), sp.lo); - ret mk_uint(cx, sp, loc.line); + return mk_uint(cx, sp, loc.line); } /* col!{}: expands to the current column number */ @@ -26,7 +26,7 @@ fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"col"); let loc = codemap::lookup_char_pos(cx.codemap(), sp.lo); - ret mk_uint(cx, sp, loc.col); + return mk_uint(cx, sp, loc.col); } /* file!{}: expands to the current filename */ @@ -37,19 +37,19 @@ fn expand_file(cx: ext_ctxt, sp: span, arg: ast::mac_arg, get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"file"); let { file: @{ name: filename, _ }, _ } = codemap::lookup_char_pos(cx.codemap(), sp.lo); - ret mk_uniq_str(cx, sp, filename); + return mk_uniq_str(cx, sp, filename); } fn expand_stringify(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), ~"stringify"); - ret mk_uniq_str(cx, sp, pprust::expr_to_str(args[0])); + return mk_uniq_str(cx, sp, pprust::expr_to_str(args[0])); } fn expand_mod(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"file"); - ret mk_uniq_str(cx, sp, + return mk_uniq_str(cx, sp, str::connect(cx.mod_path().map(|x|*x), ~"::")); } @@ -60,7 +60,7 @@ fn expand_include(cx: ext_ctxt, sp: span, arg: ast::mac_arg, let p = parse::new_parser_from_file(cx.parse_sess(), cx.cfg(), res_rel_file(cx, sp, file), parse::parser::SOURCE_FILE); - ret p.parse_expr(); + return p.parse_expr(); } fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, @@ -77,7 +77,7 @@ fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, } } - ret mk_uniq_str(cx, sp, result::unwrap(res)); + return mk_uniq_str(cx, sp, result::unwrap(res)); } fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, @@ -91,7 +91,7 @@ fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, let u8_exprs = vec::map(src, |char: u8| { mk_u8(cx, sp, char) }); - ret mk_uniq_vec_e(cx, sp, u8_exprs); + return mk_uniq_vec_e(cx, sp, u8_exprs); } result::err(e) { cx.parse_sess().span_diagnostic.handler().fatal(e) @@ -104,9 +104,9 @@ fn res_rel_file(cx: ext_ctxt, sp: codemap::span, +arg: path) -> path { if !path::path_is_absolute(arg) { let cu = codemap::span_to_filename(sp, cx.codemap()); let dir = path::dirname(cu); - ret path::connect(dir, arg); + return path::connect(dir, arg); } else { - ret arg; + return arg; } } diff --git a/src/libsyntax/ext/tt/earley_parser.rs 
b/src/libsyntax/ext/tt/earley_parser.rs index ed4e2e44f08..6930c09e7ce 100644 --- a/src/libsyntax/ext/tt/earley_parser.rs +++ b/src/libsyntax/ext/tt/earley_parser.rs @@ -129,7 +129,7 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match]) } let ret_val = box_str_hash::<@named_match>(); for ms.each() |m| { n_rec(p_s, m, res, ret_val) } - ret ret_val; + return ret_val; } enum parse_result { @@ -260,13 +260,13 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) /* error messages here could be improved with links to orig. rules */ if tok == EOF { if eof_eis.len() == 1u { - ret success( + return success( nameize(sess, ms, vec::map(eof_eis[0u].matches, |dv| dv.pop()))); } else if eof_eis.len() > 1u { - ret failure(sp, ~"Ambiguity: multiple successful parses"); + return failure(sp, ~"Ambiguity: multiple successful parses"); } else { - ret failure(sp, ~"Unexpected end of macro invocation"); + return failure(sp, ~"Unexpected end of macro invocation"); } } else { if (bb_eis.len() > 0u && next_eis.len() > 0u) @@ -277,12 +277,12 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) fmt!{"%s ('%s')", *name, *bind} } _ { fail; } } }), ~" or "); - ret failure(sp, fmt!{ + return failure(sp, fmt!{ "Local ambiguity: multiple parsing options: \ built-in NTs %s or %u other options.", nts, next_eis.len()}); } else if (bb_eis.len() == 0u && next_eis.len() == 0u) { - ret failure(sp, ~"No rules expected the token " + return failure(sp, ~"No rules expected the token " + to_str(*rdr.interner(), tok)); } else if (next_eis.len() > 0u) { /* Now process the next token */ diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index a5fc20c461b..3c680640a31 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -70,7 +70,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, ~[rhs]); let p = parser(cx.parse_sess(), cx.cfg(), trncbr as reader, SOURCE_FILE); - ret mr_expr(p.parse_expr()); + return mr_expr(p.parse_expr()); } failure(sp, msg) { if sp.lo >= best_fail_spot.lo { @@ -87,5 +87,8 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, let exp = |cx, sp, arg| generic_extension(cx, sp, arg, lhses, rhses); - ret mr_def({ident: name, ext: expr_tt({expander: exp, span: some(sp)})}); + return mr_def({ + ident: name, + ext: expr_tt({expander: exp, span: some(sp)}) + }); } \ No newline at end of file diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 9ab6261052a..9fda95c464e 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -56,7 +56,7 @@ fn new_tt_reader(sp_diag: span_handler, itr: @interner<@~str>, mut cur_span: ast_util::mk_sp(0u,0u) }; tt_next_token(r); /* get cur_tok and cur_span set up */ - ret r; + return r; } pure fn dup_tt_frame(&&f: tt_frame) -> tt_frame { @@ -145,7 +145,7 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} { alt r.cur.up { tt_frame_up(none) { r.cur_tok = EOF; - ret ret_val; + return ret_val; } tt_frame_up(some(tt_f)) { if r.cur.dotdotdoted { @@ -163,7 +163,7 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} { alt r.cur.sep { some(tk) { r.cur_tok = tk; /* repeat same span, I guess */ - ret ret_val; + return ret_val; } none {} } @@ -180,7 +180,7 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} { tt_tok(sp, tok) { r.cur_span = sp; r.cur_tok = tok; r.cur.idx += 1u; - ret ret_val; + return ret_val; } tt_seq(sp, tts, sep, zerok) { alt 
lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) { @@ -204,7 +204,7 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} { } r.cur.idx += 1u; - ret tt_next_token(r); + return tt_next_token(r); } else { vec::push(r.repeat_len, len); vec::push(r.repeat_idx, 0u); @@ -223,12 +223,12 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} { matched_nonterminal(nt_ident(sn,b)) { r.cur_span = sp; r.cur_tok = IDENT(sn,b); r.cur.idx += 1u; - ret ret_val; + return ret_val; } matched_nonterminal(other_whole_nt) { r.cur_span = sp; r.cur_tok = INTERPOLATED(other_whole_nt); r.cur.idx += 1u; - ret ret_val; + return ret_val; } matched_seq(*) { r.sp_diag.span_fatal( |
