Diffstat (limited to 'src/libsyntax/ext')
 src/libsyntax/ext/auto_serialize.rs  | 10 +++++-----
 src/libsyntax/ext/auto_serialize2.rs | 12 ++++++------
 src/libsyntax/ext/base.rs            |  2 +-
 src/libsyntax/ext/expand.rs          |  4 ++--
 src/libsyntax/ext/pipes/pipec.rs     |  4 ++--
 src/libsyntax/ext/simplext.rs        |  2 +-
 src/libsyntax/ext/tt/macro_parser.rs |  6 +++---
 src/libsyntax/ext/tt/transcribe.rs   | 11 ++++++-----
 8 files changed, 26 insertions(+), 25 deletions(-)
diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs
index 4ebb8501041..fa14d3b5e99 100644
--- a/src/libsyntax/ext/auto_serialize.rs
+++ b/src/libsyntax/ext/auto_serialize.rs
@@ -90,8 +90,8 @@ fn expand(cx: ext_ctxt,
           span: span,
           _mitem: ast::meta_item,
           in_items: ~[@ast::item]) -> ~[@ast::item] {
-    fn not_auto_serialize(a: ast::attribute) -> bool {
-        attr::get_attr_name(a) != ~"auto_serialize"
+    fn not_auto_serialize(a: &ast::attribute) -> bool {
+        attr::get_attr_name(*a) != ~"auto_serialize"
     }
 
     fn filter_attrs(item: @ast::item) -> @ast::item {
@@ -102,12 +102,12 @@ fn expand(cx: ext_ctxt,
     do vec::flat_map(in_items) |in_item| {
         match in_item.node {
           ast::item_ty(ty, tps) => {
-            vec::append(~[filter_attrs(in_item)],
+            vec::append(~[filter_attrs(*in_item)],
                         ty_fns(cx, in_item.ident, ty, tps))
           }
 
           ast::item_enum(enum_definition, tps) => {
-            vec::append(~[filter_attrs(in_item)],
+            vec::append(~[filter_attrs(*in_item)],
                         enum_fns(cx, in_item.ident,
                                  in_item.span, enum_definition.variants, tps))
           }
@@ -116,7 +116,7 @@ fn expand(cx: ext_ctxt,
             cx.span_err(span, ~"#[auto_serialize] can only be \
                                applied to type and enum \
                                definitions");
-            ~[in_item]
+            ~[*in_item]
           }
         }
     }
diff --git a/src/libsyntax/ext/auto_serialize2.rs b/src/libsyntax/ext/auto_serialize2.rs
index b51184eefd8..099ba67713f 100644
--- a/src/libsyntax/ext/auto_serialize2.rs
+++ b/src/libsyntax/ext/auto_serialize2.rs
@@ -75,8 +75,8 @@ fn expand(cx: ext_ctxt,
           span: span,
           _mitem: ast::meta_item,
           in_items: ~[@ast::item]) -> ~[@ast::item] {
-    fn not_auto_serialize2(a: ast::attribute) -> bool {
-        attr::get_attr_name(a) != ~"auto_serialize2"
+    fn not_auto_serialize2(a: &ast::attribute) -> bool {
+        attr::get_attr_name(*a) != ~"auto_serialize2"
     }
 
     fn filter_attrs(item: @ast::item) -> @ast::item {
@@ -88,19 +88,19 @@ fn expand(cx: ext_ctxt,
         match item.node {
             ast::item_ty(@{node: ast::ty_rec(fields), _}, tps) => {
                 ~[
-                    filter_attrs(item),
+                    filter_attrs(*item),
                     mk_rec_impl(cx, item.span, item.ident, fields, tps),
                 ]
             },
             ast::item_class(@{ fields, _}, tps) => {
                 ~[
-                    filter_attrs(item),
+                    filter_attrs(*item),
                     mk_struct_impl(cx, item.span, item.ident, fields, tps),
                 ]
             },
             ast::item_enum(enum_def, tps) => {
                 ~[
-                    filter_attrs(item),
+                    filter_attrs(*item),
                     mk_enum_impl(cx, item.span, item.ident, enum_def, tps),
                 ]
             },
@@ -108,7 +108,7 @@ fn expand(cx: ext_ctxt,
                 cx.span_err(span, ~"#[auto_serialize2] can only be applied \
                                     to structs, record types, and enum \
                                     definitions");
-                ~[item]
+                ~[*item]
             }
         }
     }
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 566cdc4fa21..5f4d86b9860 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -161,7 +161,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
         fn print_backtrace() { }
         fn backtrace() -> expn_info { self.backtrace }
         fn mod_push(i: ast::ident) { self.mod_path.push(i); }
-        fn mod_pop() { vec::pop(self.mod_path); }
+        fn mod_pop() { self.mod_path.pop(); }
         fn mod_path() -> ~[ast::ident] { return self.mod_path; }
         fn bt_push(ei: codemap::expn_info_) {
             match ei {
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index dbe475c1b50..22e2cfcde6b 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -144,7 +144,7 @@ fn expand_mod_items(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
     // decorated with "item decorators", then use that function to transform
     // the item into a new set of items.
     let new_items = do vec::flat_map(module_.items) |item| {
-        do vec::foldr(item.attrs, ~[item]) |attr, items| {
+        do vec::foldr(item.attrs, ~[*item]) |attr, items| {
             let mname = match attr.node.value.node {
               ast::meta_word(n) => n,
               ast::meta_name_value(n, _) => n,
@@ -160,7 +160,7 @@ fn expand_mod_items(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
         }
     };
 
-    return {items: new_items,.. module_};
+    return {items: new_items, ..module_};
 }
 
 
diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs
index f93fa830f92..b9b1484ce5a 100644
--- a/src/libsyntax/ext/pipes/pipec.rs
+++ b/src/libsyntax/ext/pipes/pipec.rs
@@ -47,7 +47,7 @@ impl message: gen_send {
             let arg_names = tys.mapi(|i, _ty| cx.ident_of(~"x_"+i.to_str()));
 
             let args_ast = (arg_names, tys).map(
-                |n, t| cx.arg_mode(n, t, ast::by_copy)
+                |n, t| cx.arg_mode(*n, *t, ast::by_copy)
             );
 
             let pipe_ty = cx.ty_path_ast_builder(
@@ -129,7 +129,7 @@ impl message: gen_send {
                 let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str()));
 
                 let args_ast = (arg_names, tys).map(
-                    |n, t| cx.arg_mode(cx.ident_of(n), t, ast::by_copy)
+                    |n, t| cx.arg_mode(cx.ident_of(*n), *t, ast::by_copy)
                 );
 
                 let args_ast = vec::append(
diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs
index 51239754635..e16e1c55349 100644
--- a/src/libsyntax/ext/simplext.rs
+++ b/src/libsyntax/ext/simplext.rs
@@ -307,7 +307,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
                 while idx < rc {
                     idx_path.push(idx);
                     res.push(recur(repeat_me)); // whew!
-                    vec::pop(*idx_path);
+                    idx_path.pop();
                     idx += 1u;
                 }
               }
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 737694337e3..16e3454ca2c 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -219,7 +219,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
 
         /* we append new items to this while we go */
         while cur_eis.len() > 0u { /* for each Earley Item */
-            let mut ei = vec::pop(cur_eis);
+            let mut ei = cur_eis.pop();
 
             let idx = ei.idx;
             let len = ei.elts.len();
@@ -350,13 +350,13 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
             } else if (next_eis.len() > 0u) {
                 /* Now process the next token */
                 while(next_eis.len() > 0u) {
-                    cur_eis.push(vec::pop(next_eis));
+                    cur_eis.push(next_eis.pop());
                 }
                 rdr.next_token();
             } else /* bb_eis.len() == 1 */ {
                 let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE);
 
-                let ei = vec::pop(bb_eis);
+                let ei = bb_eis.pop();
                 match ei.elts[ei.idx].node {
                   match_nonterminal(_, name, idx) => {
                     ei.matches[idx].push(@matched_nonterminal(
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 558593579bf..a8a41cca6cb 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -82,13 +82,13 @@ pure fn dup_tt_reader(&&r: tt_reader) -> tt_reader {
 
 pure fn lookup_cur_matched_by_matched(r: tt_reader,
                                       start: @named_match) -> @named_match {
-    pure fn red(&&ad: @named_match, &&idx: uint) -> @named_match {
+    pure fn red(+ad: @named_match, idx: &uint) -> @named_match {
         match *ad {
           matched_nonterminal(_) => {
             // end of the line; duplicate henceforth
             ad
           }
-          matched_seq(ads, _) => ads[idx]
+          matched_seq(ads, _) => ads[*idx]
         }
     }
     vec::foldl(start, r.repeat_idx, red)
@@ -122,8 +122,8 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis {
     }
     match t {
       tt_delim(tts) | tt_seq(_, tts, _, _) => {
-        vec::foldl(lis_unconstrained, tts, {|lis, tt|
-            lis_merge(lis, lockstep_iter_size(tt, r), r) })
+        vec::foldl(lis_unconstrained, tts, |lis, tt|
+            lis_merge(lis, lockstep_iter_size(*tt, r), r))
       }
       tt_tok(*) => lis_unconstrained,
       tt_nonterminal(_, name) => match *lookup_cur_matched(r, name) {
@@ -148,7 +148,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
               }
               tt_frame_up(Some(tt_f)) => {
                 if r.cur.dotdotdoted {
-                    vec::pop(r.repeat_idx); vec::pop(r.repeat_len);
+                    r.repeat_idx.pop();
+                    r.repeat_len.pop();
                 }
 
                 r.cur = tt_f;