about summary refs log tree commit diff
path: root/src/libsyntax
diff options
context:
space:
mode:
author    Michael Sullivan <sully@msully.net>  2012-06-25 20:00:46 -0700
committer Michael Sullivan <sully@msully.net>  2012-06-25 20:00:46 -0700
commit   329eca6044fdf376a7a89ec7a96dba7a8b884cf7 (patch)
tree     7008814278a066914b6ba36818388d5212ffda9f /src/libsyntax
parent   c087aaf56b1109163126fea4c2760f8414ffbe56 (diff)
download rust-329eca6044fdf376a7a89ec7a96dba7a8b884cf7.tar.gz
         rust-329eca6044fdf376a7a89ec7a96dba7a8b884cf7.zip
Make vectors uglier ([]/~). Sorry. Should be temporary. Closes #2725.
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                  119
-rw-r--r--  src/libsyntax/ast_map.rs               14
-rw-r--r--  src/libsyntax/ast_util.rs              31
-rw-r--r--  src/libsyntax/attr.rs                  58
-rw-r--r--  src/libsyntax/codemap.rs               12
-rw-r--r--  src/libsyntax/ext/auto_serialize.rs   181
-rw-r--r--  src/libsyntax/ext/base.rs              14
-rw-r--r--  src/libsyntax/ext/build.rs             22
-rw-r--r--  src/libsyntax/ext/concat_idents.rs      4
-rw-r--r--  src/libsyntax/ext/earley_parser.rs     24
-rw-r--r--  src/libsyntax/ext/expand.rs             4
-rw-r--r--  src/libsyntax/ext/fmt.rs               19
-rw-r--r--  src/libsyntax/ext/log_syntax.rs         3
-rw-r--r--  src/libsyntax/ext/qquote.rs            49
-rw-r--r--  src/libsyntax/ext/simplext.rs          68
-rw-r--r--  src/libsyntax/fold.rs                  17
-rw-r--r--  src/libsyntax/parse.rs                  5
-rw-r--r--  src/libsyntax/parse/attr.rs            29
-rw-r--r--  src/libsyntax/parse/comments.rs        36
-rw-r--r--  src/libsyntax/parse/common.rs          22
-rw-r--r--  src/libsyntax/parse/eval.rs            32
-rw-r--r--  src/libsyntax/parse/lexer.rs            4
-rw-r--r--  src/libsyntax/parse/parser.rs         251
-rw-r--r--  src/libsyntax/parse/token.rs            4
-rw-r--r--  src/libsyntax/print/pp.rs              32
-rw-r--r--  src/libsyntax/print/pprust.rs          75
-rw-r--r--  src/libsyntax/visit.rs                 30
27 files changed, 594 insertions, 565 deletions
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index d6d2d4f3165..936718dd0a7 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -41,9 +41,9 @@ type fn_ident = option<ident>;
 #[auto_serialize]
 type path = {span: span,
              global: bool,
-             idents: [ident],
+             idents: [ident]/~,
              rp: option<@region>,
-             types: [@ty]};
+             types: [@ty]/~};
 
 #[auto_serialize]
 type crate_num = int;
@@ -66,7 +66,7 @@ enum ty_param_bound {
 }
 
 #[auto_serialize]
-type ty_param = {ident: ident, id: node_id, bounds: @[ty_param_bound]};
+type ty_param = {ident: ident, id: node_id, bounds: @[ty_param_bound]/~};
 
 #[auto_serialize]
 enum def {
@@ -92,19 +92,19 @@ enum def {
 
 // The set of meta_items that define the compilation environment of the crate,
 // used to drive conditional compilation
-type crate_cfg = [@meta_item];
+type crate_cfg = [@meta_item]/~;
 
 type crate = spanned<crate_>;
 
 type crate_ =
-    {directives: [@crate_directive],
+    {directives: [@crate_directive]/~,
      module: _mod,
-     attrs: [attribute],
+     attrs: [attribute]/~,
      config: crate_cfg};
 
 enum crate_directive_ {
-    cdir_src_mod(ident, [attribute]),
-    cdir_dir_mod(ident, [@crate_directive], [attribute]),
+    cdir_src_mod(ident, [attribute]/~),
+    cdir_dir_mod(ident, [@crate_directive]/~, [attribute]/~),
 
     // NB: cdir_view_item is *not* processed by the rest of the compiler, the
     // attached view_items are sunk into the crate's module during parsing,
@@ -124,7 +124,7 @@ type meta_item = spanned<meta_item_>;
 #[auto_serialize]
 enum meta_item_ {
     meta_word(ident),
-    meta_list(ident, [@meta_item]),
+    meta_list(ident, [@meta_item]/~),
     meta_name_value(ident, lit),
 }
 
@@ -132,8 +132,11 @@ enum meta_item_ {
 type blk = spanned<blk_>;
 
 #[auto_serialize]
-type blk_ = {view_items: [@view_item], stmts: [@stmt], expr: option<@expr>,
-             id: node_id, rules: blk_check_mode};
+type blk_ = {view_items: [@view_item]/~,
+             stmts: [@stmt]/~,
+             expr: option<@expr>,
+             id: node_id,
+             rules: blk_check_mode};
 
 #[auto_serialize]
 type pat = {id: node_id, node: pat_, span: span};
@@ -152,10 +155,10 @@ enum pat_ {
     // records this pattern's node_id in an auxiliary
     // set (of "pat_idents that refer to nullary enums")
     pat_ident(@path, option<@pat>),
-    pat_enum(@path, option<[@pat]>), // "none" means a * pattern where
+    pat_enum(@path, option<[@pat]/~>), // "none" means a * pattern where
                                   // we don't bind the fields to names
-    pat_rec([field_pat], bool),
-    pat_tup([@pat]),
+    pat_rec([field_pat]/~, bool),
+    pat_tup([@pat]/~),
     pat_box(@pat),
     pat_uniq(@pat),
     pat_lit(@expr),
@@ -267,10 +270,10 @@ type local = spanned<local_>;
 type decl = spanned<decl_>;
 
 #[auto_serialize]
-enum decl_ { decl_local([@local]), decl_item(@item), }
+enum decl_ { decl_local([@local]/~), decl_item(@item), }
 
 #[auto_serialize]
-type arm = {pats: [@pat], guard: option<@expr>, body: blk};
+type arm = {pats: [@pat]/~, guard: option<@expr>, body: blk};
 
 #[auto_serialize]
 type field_ = {mutbl: mutability, ident: ident, expr: @expr};
@@ -293,10 +296,10 @@ enum alt_mode { alt_check, alt_exhaustive, }
 #[auto_serialize]
 enum expr_ {
     expr_vstore(@expr, vstore),
-    expr_vec([@expr], mutability),
-    expr_rec([field], option<@expr>),
-    expr_call(@expr, [@expr], bool), // True iff last argument is a block
-    expr_tup([@expr]),
+    expr_vec([@expr]/~, mutability),
+    expr_rec([field]/~, option<@expr>),
+    expr_call(@expr, [@expr]/~, bool), // True iff last argument is a block
+    expr_tup([@expr]/~),
     expr_binary(binop, @expr, @expr),
     expr_unary(unop, @expr),
     expr_lit(@lit),
@@ -307,7 +310,7 @@ enum expr_ {
        Same semantics as while(true) { body }, but typestate knows that the
        (implicit) condition is always true. */
     expr_loop(blk),
-    expr_alt(@expr, [arm], alt_mode),
+    expr_alt(@expr, [arm]/~, alt_mode),
     expr_fn(proto, fn_decl, blk, capture_clause),
     expr_fn_block(fn_decl, blk, capture_clause),
     // Inner expr is always an expr_fn_block. We need the wrapping node to
@@ -327,7 +330,7 @@ enum expr_ {
     expr_assign(@expr, @expr),
     expr_swap(@expr, @expr),
     expr_assign_op(binop, @expr, @expr),
-    expr_field(@expr, ident, [@ty]),
+    expr_field(@expr, ident, [@ty]/~),
     expr_index(@expr, @expr),
     expr_path(@path),
     expr_addr_of(mutability, @expr),
@@ -359,7 +362,7 @@ type capture_item = @{
 };
 
 #[auto_serialize]
-type capture_clause = @[capture_item];
+type capture_clause = @[capture_item]/~;
 
 /*
 // Says whether this is a block the user marked as
@@ -373,7 +376,7 @@ enum blk_sort {
 #[auto_serialize]
 enum token_tree {
     /* for macro invocations; parsing is the macro's job */
-    tt_delim([token_tree]),
+    tt_delim([token_tree]/~),
     tt_flat(span, token::token)
 }
 
@@ -384,7 +387,7 @@ type matcher = spanned<matcher_>;
 enum matcher_ {
     mtc_tok(token::token),
     /* body, separator, zero ok? : */
-    mtc_rep([matcher], option<token::token>, bool),
+    mtc_rep([matcher]/~, option<token::token>, bool),
     mtc_bb(ident, ident, uint)
 }
 
@@ -438,8 +441,8 @@ type ty_field_ = {ident: ident, mt: mt};
 type ty_field = spanned<ty_field_>;
 
 #[auto_serialize]
-type ty_method = {ident: ident, attrs: [attribute],
-                  decl: fn_decl, tps: [ty_param], span: span};
+type ty_method = {ident: ident, attrs: [attribute]/~,
+                  decl: fn_decl, tps: [ty_param]/~, span: span};
 
 #[auto_serialize]
 enum int_ty { ty_i, ty_char, ty_i8, ty_i16, ty_i32, ty_i64, }
@@ -478,11 +481,11 @@ enum ty_ {
     ty_vec(mt),
     ty_ptr(mt),
     ty_rptr(@region, mt),
-    ty_rec([ty_field]),
+    ty_rec([ty_field]/~),
     ty_fn(proto, fn_decl),
-    ty_tup([@ty]),
+    ty_tup([@ty]/~),
     ty_path(@path, node_id),
-    ty_constr(@ty, [@ty_constr]),
+    ty_constr(@ty, [@ty_constr]/~),
     ty_vstore(@ty, vstore),
     ty_mac(mac),
     // ty_infer means the type should be inferred instead of it having been
@@ -522,7 +525,7 @@ type constr_arg = spanned<fn_constr_arg>;
 
 #[auto_serialize]
 type constr_general_<ARG, ID> =
-    {path: @path, args: [@sp_constr_arg<ARG>], id: ID};
+    {path: @path, args: [@sp_constr_arg<ARG>]/~, id: ID};
 
 // In the front end, constraints have a node ID attached.
 // Typeck turns this to a def_id, using the output of resolve.
@@ -549,11 +552,11 @@ type arg = {mode: mode, ty: @ty, ident: ident, id: node_id};
 
 #[auto_serialize]
 type fn_decl =
-    {inputs: [arg],
+    {inputs: [arg]/~,
      output: @ty,
      purity: purity,
      cf: ret_style,
-     constraints: [@constr]};
+     constraints: [@constr]/~};
 
 #[auto_serialize]
 enum purity {
@@ -571,14 +574,14 @@ enum ret_style {
 }
 
 #[auto_serialize]
-type method = {ident: ident, attrs: [attribute],
-               tps: [ty_param], decl: fn_decl, body: blk,
+type method = {ident: ident, attrs: [attribute]/~,
+               tps: [ty_param]/~, decl: fn_decl, body: blk,
                id: node_id, span: span, self_id: node_id,
                vis: visibility};  // always public, unless it's a
                                   // class method
 
 #[auto_serialize]
-type _mod = {view_items: [@view_item], items: [@item]};
+type _mod = {view_items: [@view_item]/~, items: [@item]/~};
 
 #[auto_serialize]
 enum native_abi {
@@ -589,14 +592,14 @@ enum native_abi {
 
 #[auto_serialize]
 type native_mod =
-    {view_items: [@view_item],
-     items: [@native_item]};
+    {view_items: [@view_item]/~,
+     items: [@native_item]/~};
 
 #[auto_serialize]
 type variant_arg = {ty: @ty, id: node_id};
 
 #[auto_serialize]
-type variant_ = {name: ident, attrs: [attribute], args: [variant_arg],
+type variant_ = {name: ident, attrs: [attribute]/~, args: [variant_arg]/~,
                  id: node_id, disr_expr: option<@expr>, vis: visibility};
 
 #[auto_serialize]
@@ -625,18 +628,18 @@ enum view_path_ {
     view_path_glob(@path, node_id),
 
     // foo::bar::{a,b,c}
-    view_path_list(@path, [path_list_ident], node_id)
+    view_path_list(@path, [path_list_ident]/~, node_id)
 }
 
 #[auto_serialize]
-type view_item = {node: view_item_, attrs: [attribute],
+type view_item = {node: view_item_, attrs: [attribute]/~,
                   vis: visibility, span: span};
 
 #[auto_serialize]
 enum view_item_ {
-    view_item_use(ident, [@meta_item], node_id),
-    view_item_import([@view_path]),
-    view_item_export([@view_path])
+    view_item_use(ident, [@meta_item]/~, node_id),
+    view_item_import([@view_path]/~),
+    view_item_export([@view_path]/~)
 }
 
 // Meta-data associated with an item
@@ -663,7 +666,7 @@ type iface_ref = {path: @path, id: node_id};
 enum visibility { public, private }
 
 #[auto_serialize]
-type item = {ident: ident, attrs: [attribute],
+type item = {ident: ident, attrs: [attribute]/~,
              id: node_id, node: item_,
              vis: visibility, span: span};
 
@@ -676,23 +679,23 @@ enum region_param {
 #[auto_serialize]
 enum item_ {
     item_const(@ty, @expr),
-    item_fn(fn_decl, [ty_param], blk),
+    item_fn(fn_decl, [ty_param]/~, blk),
     item_mod(_mod),
     item_native_mod(native_mod),
-    item_ty(@ty, [ty_param], region_param),
-    item_enum([variant], [ty_param], region_param),
-    item_class([ty_param], /* ty params for class */
-               [@iface_ref],   /* ifaces this class implements */
-               [@class_member], /* methods, etc. */
+    item_ty(@ty, [ty_param]/~, region_param),
+    item_enum([variant]/~, [ty_param]/~, region_param),
+    item_class([ty_param]/~, /* ty params for class */
+               [@iface_ref]/~,   /* ifaces this class implements */
+               [@class_member]/~, /* methods, etc. */
                                /* (not including ctor or dtor) */
                class_ctor,
                /* dtor is optional */
                option<class_dtor>,
                region_param
                ),
-    item_iface([ty_param], region_param, [ty_method]),
-    item_impl([ty_param], region_param, option<@iface_ref> /* iface */,
-              @ty /* self */, [@method]),
+    item_iface([ty_param]/~, region_param, [ty_method]/~),
+    item_impl([ty_param]/~, region_param, option<@iface_ref> /* iface */,
+              @ty /* self */, [@method]/~),
 }
 
 #[auto_serialize]
@@ -727,14 +730,14 @@ type class_dtor_ = {id: node_id,
 #[auto_serialize]
 type native_item =
     {ident: ident,
-     attrs: [attribute],
+     attrs: [attribute]/~,
      node: native_item_,
      id: node_id,
      span: span};
 
 #[auto_serialize]
 enum native_item_ {
-    native_item_fn(fn_decl, [ty_param]),
+    native_item_fn(fn_decl, [ty_param]/~),
 }
 
 // The data we save and restore about an inlined item or method.  This is not
@@ -745,8 +748,8 @@ enum inlined_item {
     ii_item(@item),
     ii_method(def_id /* impl id */, @method),
     ii_native(@native_item),
-    ii_ctor(class_ctor, ident, [ty_param], def_id /* parent id */),
-    ii_dtor(class_dtor, ident, [ty_param], def_id /* parent id */)
+    ii_ctor(class_ctor, ident, [ty_param]/~, def_id /* parent id */),
+    ii_dtor(class_dtor, ident, [ty_param]/~, def_id /* parent id */)
 }
 
 //
diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs
index 219769dd28c..96ecadc2be3 100644
--- a/src/libsyntax/ast_map.rs
+++ b/src/libsyntax/ast_map.rs
@@ -7,7 +7,7 @@ import ast_util::inlined_item_methods;
 import diagnostic::span_handler;
 
 enum path_elt { path_mod(ident), path_name(ident) }
-type path = [path_elt];
+type path = [path_elt]/~;
 
 /* FIXMEs that say "bad" are as per #2543 */
 fn path_to_str_with_sep(p: path, sep: str) -> str {
@@ -45,9 +45,9 @@ enum ast_node {
     node_local(uint),
     // Constructor for a class
     // def_id is parent id
-    node_ctor(ident, [ty_param], @class_ctor, def_id, @path),
+    node_ctor(ident, [ty_param]/~, @class_ctor, def_id, @path),
     // Destructor for a class
-    node_dtor([ty_param], @class_dtor, def_id, @path),
+    node_dtor([ty_param]/~, @class_dtor, def_id, @path),
     node_block(blk),
 }
 
@@ -57,7 +57,7 @@ type ctx = {map: map, mut path: path,
 type vt = visit::vt<ctx>;
 
 fn extend(cx: ctx, +elt: ident) -> @path {
-    @(cx.path + [path_name(elt)])
+    @(cx.path + [path_name(elt)]/~)
 }
 
 fn mk_ast_map_visitor() -> vt {
@@ -75,7 +75,7 @@ fn mk_ast_map_visitor() -> vt {
 
 fn map_crate(diag: span_handler, c: crate) -> map {
     let cx = {map: std::map::int_hash(),
-              mut path: [],
+              mut path: []/~,
               mut local_id: 0u,
               diag: diag};
     visit::visit_crate(c, cx, mk_ast_map_visitor());
@@ -229,9 +229,9 @@ fn map_item(i: @item, cx: ctx, v: vt) {
     }
     alt i.node {
       item_mod(_) | item_native_mod(_) {
-        cx.path += [path_mod(i.ident)];
+        cx.path += [path_mod(i.ident)]/~;
       }
-      _ { cx.path += [path_name(i.ident)]; }
+      _ { cx.path += [path_name(i.ident)]/~; }
     }
     visit::visit_item(i, cx, v);
     vec::pop(cx.path);
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index f402e1f6ab5..7c0a7158c17 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -23,7 +23,7 @@ pure fn dummy_sp() -> span { ret mk_sp(0u, 0u); }
 
 pure fn path_name(p: @path) -> str { path_name_i(p.idents) }
 
-pure fn path_name_i(idents: [ident]) -> str {
+pure fn path_name_i(idents: [ident]/~) -> str {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
     str::connect(idents.map({|i|*i}), "::")
 }
@@ -246,18 +246,19 @@ fn new_def_hash<V: copy>() -> std::map::hashmap<ast::def_id, V> {
 }
 
 fn block_from_expr(e: @expr) -> blk {
-    let blk_ = default_block([], option::some::<@expr>(e), e.id);
+    let blk_ = default_block([]/~, option::some::<@expr>(e), e.id);
     ret {node: blk_, span: e.span};
 }
 
-fn default_block(+stmts1: [@stmt], expr1: option<@expr>, id1: node_id) ->
+fn default_block(+stmts1: [@stmt]/~, expr1: option<@expr>, id1: node_id) ->
    blk_ {
-    {view_items: [], stmts: stmts1, expr: expr1, id: id1, rules: default_blk}
+    {view_items: []/~, stmts: stmts1,
+     expr: expr1, id: id1, rules: default_blk}
 }
 
 fn ident_to_path(s: span, +i: ident) -> @path {
-    @{span: s, global: false, idents: [i],
-      rp: none, types: []}
+    @{span: s, global: false, idents: [i]/~,
+      rp: none, types: []/~}
 }
 
 pure fn is_unguarded(&&a: arm) -> bool {
@@ -267,7 +268,7 @@ pure fn is_unguarded(&&a: arm) -> bool {
     }
 }
 
-pure fn unguarded_pat(a: arm) -> option<[@pat]> {
+pure fn unguarded_pat(a: arm) -> option<[@pat]/~> {
     if is_unguarded(a) { some(/* FIXME (#2543) */ copy a.pats) } else { none }
 }
 
@@ -286,14 +287,14 @@ pure fn class_item_ident(ci: @class_member) -> ident {
 type ivar = {ident: ident, ty: @ty, cm: class_mutability,
              id: node_id, vis: visibility};
 
-fn public_methods(ms: [@method]) -> [@method] {
+fn public_methods(ms: [@method]/~) -> [@method]/~ {
     vec::filter(ms, {|m| alt m.vis {
                     public { true }
                     _   { false }}})
 }
 
-fn split_class_items(cs: [@class_member]) -> ([ivar], [@method]) {
-    let mut vs = [], ms = [];
+fn split_class_items(cs: [@class_member]/~) -> ([ivar]/~, [@method]/~) {
+    let mut vs = []/~, ms = []/~;
     for cs.each {|c|
       alt c.node {
         instance_var(i, t, cm, id, vis) {
@@ -301,9 +302,9 @@ fn split_class_items(cs: [@class_member]) -> ([ivar], [@method]) {
                   ty: t,
                   cm: cm,
                   id: id,
-                  vis: vis}];
+                  vis: vis}]/~;
         }
-        class_method(m) { ms += [m]; }
+        class_method(m) { ms += [m]/~; }
       }
     };
     (vs, ms)
@@ -383,8 +384,8 @@ fn dtor_dec() -> fn_decl {
     let nil_t = @{id: 0, node: ty_nil, span: dummy_sp()};
     // dtor has one argument, of type ()
     {inputs: [{mode: ast::expl(ast::by_ref),
-               ty: nil_t, ident: @"_", id: 0}],
-     output: nil_t, purity: impure_fn, cf: return_val, constraints: []}
+               ty: nil_t, ident: @"_", id: 0}]/~,
+     output: nil_t, purity: impure_fn, cf: return_val, constraints: []/~}
 }
 
 // ______________________________________________________________________
@@ -471,7 +472,7 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
             }
         },
 
-        visit_ty_params: fn@(ps: [ty_param]) {
+        visit_ty_params: fn@(ps: [ty_param]/~) {
             vec::iter(ps) {|p| vfn(p.id) }
         },
 
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index fb9560065a8..af808222f11 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -56,7 +56,7 @@ fn mk_name_value_item(+name: ast::ident, +value: ast::lit)
     ret @dummy_spanned(ast::meta_name_value(name, value));
 }
 
-fn mk_list_item(+name: ast::ident, +items: [@ast::meta_item]) ->
+fn mk_list_item(+name: ast::ident, +items: [@ast::meta_item]/~) ->
    @ast::meta_item {
     ret @dummy_spanned(ast::meta_list(name, items));
 }
@@ -75,9 +75,9 @@ fn mk_attr(item: @ast::meta_item) -> ast::attribute {
 fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
 
 // Get the meta_items from inside a vector of attributes
-fn attr_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
-    let mut mitems = [];
-    for attrs.each {|a| mitems += [attr_meta(a)]; }
+fn attr_metas(attrs: [ast::attribute]/~) -> [@ast::meta_item]/~ {
+    let mut mitems = []/~;
+    for attrs.each {|a| mitems += [attr_meta(a)]/~; }
     ret mitems;
 }
 
@@ -118,7 +118,7 @@ fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@str> {
 }
 
 #[doc = "Gets a list of inner meta items from a list meta_item type"]
-fn get_meta_item_list(meta: @ast::meta_item) -> option<[@ast::meta_item]> {
+fn get_meta_item_list(meta: @ast::meta_item) -> option<[@ast::meta_item]/~> {
     alt meta.node {
       ast::meta_list(_, l) { option::some(/* FIXME (#2543) */ copy l) }
       _ { option::none }
@@ -147,8 +147,8 @@ fn get_name_value_str_pair(
 #[doc = "
 Search a list of attributes and return only those with a specific name
 "]
-fn find_attrs_by_name(attrs: [ast::attribute], +name: str) ->
-   [ast::attribute] {
+fn find_attrs_by_name(attrs: [ast::attribute]/~, +name: str) ->
+   [ast::attribute]/~ {
     let filter = (
         fn@(a: ast::attribute) -> option<ast::attribute> {
             if *get_attr_name(a) == name {
@@ -162,8 +162,8 @@ fn find_attrs_by_name(attrs: [ast::attribute], +name: str) ->
 #[doc = "
 Searcha list of meta items and return only those with a specific name
 "]
-fn find_meta_items_by_name(metas: [@ast::meta_item], +name: str) ->
-   [@ast::meta_item] {
+fn find_meta_items_by_name(metas: [@ast::meta_item]/~, +name: str) ->
+   [@ast::meta_item]/~ {
     let filter = fn@(&&m: @ast::meta_item) -> option<@ast::meta_item> {
         if *get_meta_item_name(m) == name {
             option::some(m)
@@ -176,7 +176,7 @@ fn find_meta_items_by_name(metas: [@ast::meta_item], +name: str) ->
 Returns true if a list of meta items contains another meta item. The
 comparison is performed structurally.
 "]
-fn contains(haystack: [@ast::meta_item], needle: @ast::meta_item) -> bool {
+fn contains(haystack: [@ast::meta_item]/~, needle: @ast::meta_item) -> bool {
     #debug("looking for %s",
            print::pprust::meta_item_to_str(*needle));
     for haystack.each {|item|
@@ -201,7 +201,7 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
           }
           ast::meta_list(na, la) {
 
-            // [Fixme-sorting]
+            // [Fixme-sorting]/~
             // FIXME (#607): Needs implementing
             // This involves probably sorting the list by name and
             // meta_item variant
@@ -210,16 +210,16 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
         }
 }
 
-fn contains_name(metas: [@ast::meta_item], +name: str) -> bool {
+fn contains_name(metas: [@ast::meta_item]/~, +name: str) -> bool {
     let matches = find_meta_items_by_name(metas, name);
     ret vec::len(matches) > 0u;
 }
 
-fn attrs_contains_name(attrs: [ast::attribute], +name: str) -> bool {
+fn attrs_contains_name(attrs: [ast::attribute]/~, +name: str) -> bool {
     vec::is_not_empty(find_attrs_by_name(attrs, name))
 }
 
-fn first_attr_value_str_by_name(attrs: [ast::attribute], +name: str)
+fn first_attr_value_str_by_name(attrs: [ast::attribute]/~, +name: str)
     -> option<@str> {
     let mattrs = find_attrs_by_name(attrs, name);
     if vec::len(mattrs) > 0u {
@@ -229,7 +229,7 @@ fn first_attr_value_str_by_name(attrs: [ast::attribute], +name: str)
 }
 
 fn last_meta_item_by_name(
-    items: [@ast::meta_item],
+    items: [@ast::meta_item]/~,
     +name: str
 ) -> option<@ast::meta_item> {
     let items = attr::find_meta_items_by_name(items, name);
@@ -237,7 +237,7 @@ fn last_meta_item_by_name(
 }
 
 fn last_meta_item_value_str_by_name(
-    items: [@ast::meta_item],
+    items: [@ast::meta_item]/~,
     +name: str
 ) -> option<@str> {
     alt last_meta_item_by_name(items, name) {
@@ -252,9 +252,9 @@ fn last_meta_item_value_str_by_name(
 }
 
 fn last_meta_item_list_by_name(
-    items: [@ast::meta_item],
+    items: [@ast::meta_item]/~,
     +name: str
-) -> option<[@ast::meta_item]> {
+) -> option<[@ast::meta_item]/~> {
     alt last_meta_item_by_name(items, name) {
       some(item) {
         attr::get_meta_item_list(item)
@@ -268,7 +268,7 @@ fn last_meta_item_list_by_name(
 
 // FIXME (#607): This needs to sort by meta_item variant in addition to
 // the item name (See [Fixme-sorting])
-fn sort_meta_items(+items: [@ast::meta_item]) -> [@ast::meta_item] {
+fn sort_meta_items(+items: [@ast::meta_item]/~) -> [@ast::meta_item]/~ {
     fn lteq(&&ma: @ast::meta_item, &&mb: @ast::meta_item) -> bool {
         fn key(m: @ast::meta_item) -> ast::ident {
             alt m.node {
@@ -281,13 +281,13 @@ fn sort_meta_items(+items: [@ast::meta_item]) -> [@ast::meta_item] {
     }
 
     // This is sort of stupid here, converting to a vec of mutables and back
-    let v: [mut @ast::meta_item] = vec::to_mut(items);
+    let v: [mut @ast::meta_item]/~ = vec::to_mut(items);
     std::sort::quick_sort(lteq, v);
     ret vec::from_mut(v);
 }
 
-fn remove_meta_items_by_name(items: [@ast::meta_item], name: ast::ident) ->
-   [@ast::meta_item] {
+fn remove_meta_items_by_name(items: [@ast::meta_item]/~, name: ast::ident) ->
+   [@ast::meta_item]/~ {
 
     ret vec::filter_map(items, {
         |item|
@@ -299,11 +299,11 @@ fn remove_meta_items_by_name(items: [@ast::meta_item], name: ast::ident) ->
     });
 }
 
-fn find_linkage_attrs(attrs: [ast::attribute]) -> [ast::attribute] {
-    let mut found = [];
+fn find_linkage_attrs(attrs: [ast::attribute]/~) -> [ast::attribute]/~ {
+    let mut found = []/~;
     for find_attrs_by_name(attrs, "link").each {|attr|
         alt attr.node.value.node {
-          ast::meta_list(_, _) { found += [attr] }
+          ast::meta_list(_, _) { found += [attr]/~ }
           _ { #debug("ignoring link attribute that has incorrect type"); }
         }
     }
@@ -314,7 +314,7 @@ fn find_linkage_attrs(attrs: [ast::attribute]) -> [ast::attribute] {
 From a list of crate attributes get only the meta_items that impact crate
 linkage
 "]
-fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
+fn find_linkage_metas(attrs: [ast::attribute]/~) -> [@ast::meta_item]/~ {
     find_linkage_attrs(attrs).flat_map {|attr|
         alt check attr.node.value.node {
           ast::meta_list(_, items) { /* FIXME (#2543) */ copy items }
@@ -322,7 +322,7 @@ fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
     }
 }
 
-fn native_abi(attrs: [ast::attribute]) -> either<str, ast::native_abi> {
+fn native_abi(attrs: [ast::attribute]/~) -> either<str, ast::native_abi> {
     ret alt attr::first_attr_value_str_by_name(attrs, "abi") {
       option::none {
         either::right(ast::native_abi_cdecl)
@@ -349,7 +349,7 @@ enum inline_attr {
 }
 
 #[doc = "True if something like #[inline] is found in the list of attrs."]
-fn find_inline_attr(attrs: [ast::attribute]) -> inline_attr {
+fn find_inline_attr(attrs: [ast::attribute]/~) -> inline_attr {
     // TODO---validate the usage of #[inline] and #[inline(always)]
     vec::foldl(ia_none, attrs) {|ia,attr|
         alt attr.node.value.node {
@@ -368,7 +368,7 @@ fn find_inline_attr(attrs: [ast::attribute]) -> inline_attr {
 
 
 fn require_unique_names(diagnostic: span_handler,
-                        metas: [@ast::meta_item]) {
+                        metas: [@ast::meta_item]/~) {
     let map = map::str_hash();
     for metas.each {|meta|
         let name = get_meta_item_name(meta);
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 49560fb5bbd..83085c2cc0f 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -43,7 +43,7 @@ enum file_substr {
 
 type filemap =
     @{name: filename, substr: file_substr, src: @str,
-      start_pos: file_pos, mut lines: [file_pos]};
+      start_pos: file_pos, mut lines: [file_pos]/~};
 
 type codemap = @{files: dvec<filemap>};
 
@@ -57,7 +57,7 @@ fn new_filemap_w_substr(+filename: filename, +substr: file_substr,
    -> filemap {
     ret @{name: filename, substr: substr, src: src,
           start_pos: {ch: start_pos_ch, byte: start_pos_byte},
-          mut lines: [{ch: start_pos_ch, byte: start_pos_byte}]};
+          mut lines: [{ch: start_pos_ch, byte: start_pos_byte}]/~};
 }
 
 fn new_filemap(+filename: filename, src: @str,
@@ -74,7 +74,7 @@ fn mk_substr_filename(cm: codemap, sp: span) -> str
 }
 
 fn next_line(file: filemap, chpos: uint, byte_pos: uint) {
-    file.lines += [{ch: chpos, byte: byte_pos + file.start_pos.byte}];
+    file.lines += [{ch: chpos, byte: byte_pos + file.start_pos.byte}]/~;
 }
 
 type lookup_fn = pure fn(file_pos) -> uint;
@@ -174,7 +174,7 @@ fn span_to_str(sp: span, cm: codemap) -> str {
              lo.line, lo.col, hi.line, hi.col)
 }
 
-type file_lines = {file: filemap, lines: [uint]};
+type file_lines = {file: filemap, lines: [uint]/~};
 
 fn span_to_filename(sp: span, cm: codemap::codemap) -> filename {
     let lo = lookup_char_pos(cm, sp.lo);
@@ -184,8 +184,8 @@ fn span_to_filename(sp: span, cm: codemap::codemap) -> filename {
 fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
     let lo = lookup_char_pos(cm, sp.lo);
     let hi = lookup_char_pos(cm, sp.hi);
-    let mut lines = [];
-    for uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]; };
+    let mut lines = []/~;
+    for uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]/~; };
     ret @{file: lo.file, lines: lines};
 }
 
diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs
index fe9f59d5538..f3c356923f2 100644
--- a/src/libsyntax/ext/auto_serialize.rs
+++ b/src/libsyntax/ext/auto_serialize.rs
@@ -84,13 +84,13 @@ mod syntax {
     export parse;
 }
 
-type ser_tps_map = map::hashmap<str, fn@(@ast::expr) -> [@ast::stmt]>;
+type ser_tps_map = map::hashmap<str, fn@(@ast::expr) -> [@ast::stmt]/~>;
 type deser_tps_map = map::hashmap<str, fn@() -> @ast::expr>;
 
 fn expand(cx: ext_ctxt,
           span: span,
           _mitem: ast::meta_item,
-          in_items: [@ast::item]) -> [@ast::item] {
+          in_items: [@ast::item]/~) -> [@ast::item]/~ {
     fn not_auto_serialize(a: ast::attribute) -> bool {
         attr::get_attr_name(a) != @"auto_serialize"
     }
@@ -103,11 +103,11 @@ fn expand(cx: ext_ctxt,
     vec::flat_map(in_items) {|in_item|
         alt in_item.node {
           ast::item_ty(ty, tps, _) {
-            [filter_attrs(in_item)] + ty_fns(cx, in_item.ident, ty, tps)
+            [filter_attrs(in_item)]/~ + ty_fns(cx, in_item.ident, ty, tps)
           }
 
           ast::item_enum(variants, tps, _) {
-            [filter_attrs(in_item)] + enum_fns(cx, in_item.ident,
+            [filter_attrs(in_item)]/~ + enum_fns(cx, in_item.ident,
                                                in_item.span, variants, tps)
           }
 
@@ -115,7 +115,7 @@ fn expand(cx: ext_ctxt,
             cx.span_err(span, "#[auto_serialize] can only be \
                                applied to type and enum \
                                definitions");
-            [in_item]
+            [in_item]/~
           }
         }
     }
@@ -126,26 +126,27 @@ impl helpers for ext_ctxt {
                    helper_name: str) -> @ast::path {
         let head = vec::init(base_path.idents);
         let tail = vec::last(base_path.idents);
-        self.path(base_path.span, head + [@(helper_name + "_" + *tail)])
+        self.path(base_path.span, head + [@(helper_name + "_" + *tail)]/~)
     }
 
-    fn path(span: span, strs: [ast::ident]) -> @ast::path {
-        @{span: span, global: false, idents: strs, rp: none, types: []}
+    fn path(span: span, strs: [ast::ident]/~) -> @ast::path {
+        @{span: span, global: false, idents: strs, rp: none, types: []/~}
     }
 
-    fn path_tps(span: span, strs: [ast::ident],
-                tps: [@ast::ty]) -> @ast::path {
+    fn path_tps(span: span, strs: [ast::ident]/~,
+                tps: [@ast::ty]/~) -> @ast::path {
         @{span: span, global: false, idents: strs, rp: none, types: tps}
     }
 
-    fn ty_path(span: span, strs: [ast::ident], tps: [@ast::ty]) -> @ast::ty {
+    fn ty_path(span: span, strs: [ast::ident]/~,
+               tps: [@ast::ty]/~) -> @ast::ty {
         @{id: self.next_id(),
           node: ast::ty_path(self.path_tps(span, strs, tps), self.next_id()),
           span: span}
     }
 
     fn ty_fn(span: span,
-             -input_tys: [@ast::ty],
+             -input_tys: [@ast::ty]/~,
              -output: @ast::ty) -> @ast::ty {
         let args = vec::map(input_tys) {|ty|
             {mode: ast::expl(ast::by_ref),
@@ -159,7 +160,7 @@ impl helpers for ext_ctxt {
                                             output: output,
                                             purity: ast::impure_fn,
                                             cf: ast::return_val,
-                                            constraints: []}),
+                                            constraints: []/~}),
           span: span}
     }
 
@@ -172,11 +173,11 @@ impl helpers for ext_ctxt {
     }
 
     fn var_ref(span: span, name: ast::ident) -> @ast::expr {
-        self.expr(span, ast::expr_path(self.path(span, [name])))
+        self.expr(span, ast::expr_path(self.path(span, [name]/~)))
     }
 
-    fn blk(span: span, stmts: [@ast::stmt]) -> ast::blk {
-        {node: {view_items: [],
+    fn blk(span: span, stmts: [@ast::stmt]/~) -> ast::blk {
+        {node: {view_items: []/~,
                 stmts: stmts,
                 expr: none,
                 id: self.next_id(),
@@ -185,8 +186,8 @@ impl helpers for ext_ctxt {
     }
 
     fn expr_blk(expr: @ast::expr) -> ast::blk {
-        {node: {view_items: [],
-                stmts: [],
+        {node: {view_items: []/~,
+                stmts: []/~,
                 expr: some(expr),
                 id: self.next_id(),
                 rules: ast::default_blk},
@@ -194,8 +195,8 @@ impl helpers for ext_ctxt {
     }
 
     fn binder_pat(span: span, nm: ast::ident) -> @ast::pat {
-        let path = @{span: span, global: false, idents: [nm],
-                     rp: none, types: []};
+        let path = @{span: span, global: false, idents: [nm]/~,
+                     rp: none, types: []/~};
         @{id: self.next_id(),
           node: ast::pat_ident(path, none),
           span: span}
@@ -206,7 +207,8 @@ impl helpers for ext_ctxt {
           span: expr.span}
     }
 
-    fn alt_stmt(arms: [ast::arm], span: span, -v: @ast::expr) -> @ast::stmt {
+    fn alt_stmt(arms: [ast::arm]/~,
+                span: span, -v: @ast::expr) -> @ast::stmt {
         self.stmt(
             self.expr(
                 span,
@@ -277,7 +279,7 @@ impl helpers for ext_ctxt {
 
 fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path,
                   -s: @ast::expr, -v: @ast::expr)
-    -> [@ast::stmt] {
+    -> [@ast::stmt]/~ {
     let ext_cx = cx; // required for #ast{}
 
     // We want to take a path like a::b::c<...> and generate a call
@@ -299,15 +301,15 @@ fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path,
     [cx.stmt(
         cx.expr(
             path.span,
-            ast::expr_call(callee, [s, v] + ty_args, false)))]
+            ast::expr_call(callee, [s, v]/~ + ty_args, false)))]/~
 }
 
 fn ser_variant(cx: ext_ctxt,
                tps: ser_tps_map,
-               tys: [@ast::ty],
+               tys: [@ast::ty]/~,
                span: span,
                -s: @ast::expr,
-               pfn: fn([@ast::pat]) -> ast::pat_,
+               pfn: fn([@ast::pat]/~) -> ast::pat_,
                bodyfn: fn(-@ast::expr, ast::blk) -> @ast::expr,
                argfn: fn(-@ast::expr, uint, ast::blk) -> @ast::expr)
     -> ast::arm {
@@ -326,9 +328,9 @@ fn ser_variant(cx: ext_ctxt,
     };
 
     let body_blk = cx.blk(span, stmts);
-    let body = cx.blk(span, [cx.stmt(bodyfn(s, body_blk))]);
+    let body = cx.blk(span, [cx.stmt(bodyfn(s, body_blk))]/~);
 
-    {pats: [pat], guard: none, body: body}
+    {pats: [pat]/~, guard: none, body: body}
 }
 
 fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty,
@@ -338,34 +340,34 @@ fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty,
 
 fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
                 ty: @ast::ty, -s: @ast::expr, -v: @ast::expr)
-    -> [@ast::stmt] {
+    -> [@ast::stmt]/~ {
 
     let ext_cx = cx; // required for #ast{}
 
     alt ty.node {
       ast::ty_nil {
-        [#ast[stmt]{$(s).emit_nil()}]
+        [#ast[stmt]{$(s).emit_nil()}]/~
       }
 
       ast::ty_bot {
         cx.span_err(
             ty.span, #fmt["Cannot serialize bottom type"]);
-        []
+        []/~
       }
 
       ast::ty_box(mt) {
         let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
-        [#ast(stmt){$(s).emit_box($(l));}]
+        [#ast(stmt){$(s).emit_box($(l));}]/~
       }
 
       ast::ty_uniq(mt) {
         let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
-        [#ast(stmt){$(s).emit_uniq($(l));}]
+        [#ast(stmt){$(s).emit_uniq($(l));}]/~
       }
 
       ast::ty_ptr(_) | ast::ty_rptr(_, _) {
         cx.span_err(ty.span, "cannot serialize pointer types");
-        []
+        []/~
       }
 
       ast::ty_rec(flds) {
@@ -374,7 +376,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
             let vf = cx.expr(fld.span,
                              ast::expr_field(cx.clone(v),
                                              fld.node.ident,
-                                             []));
+                                             []/~));
             let s = cx.clone(s);
             let f = cx.lit_str(fld.span, fld.node.ident);
             let i = cx.lit_uint(fld.span, fidx);
@@ -382,12 +384,12 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
             #ast(stmt){$(s).emit_rec_field($(f), $(i), $(l));}
         };
         let fld_lambda = cx.lambda(cx.blk(ty.span, fld_stmts));
-        [#ast(stmt){$(s).emit_rec($(fld_lambda));}]
+        [#ast(stmt){$(s).emit_rec($(fld_lambda));}]/~
       }
 
       ast::ty_fn(_, _) {
         cx.span_err(ty.span, "cannot serialize function types");
-        []
+        []/~
       }
 
       ast::ty_tup(tys) {
@@ -420,8 +422,8 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
                     let body = cx.lambda(blk);
                     #ast{ $(s).emit_tup_elt($(idx), $(body)) }
                 })
-        ];
-        [cx.alt_stmt(arms, ty.span, v)]
+        ]/~;
+        [cx.alt_stmt(arms, ty.span, v)]/~
       }
 
       ast::ty_path(path, _) {
@@ -444,12 +446,12 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
 
       ast::ty_mac(_) {
         cx.span_err(ty.span, "cannot serialize macro types");
-        []
+        []/~
       }
 
       ast::ty_infer {
         cx.span_err(ty.span, "cannot serialize inferred types");
-        []
+        []/~
       }
 
       ast::ty_vstore(@{node: ast::ty_vec(mt),_}, ast::vstore_uniq) |
@@ -467,7 +469,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
 
         [#ast(stmt){
             std::serialization::emit_from_vec($(s), $(v), {|__e| $(ser_e) })
-        }]
+        }]/~
       }
 
       ast::ty_vstore(_, _) {
@@ -477,20 +479,21 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
     }
 }
 
-fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param],
+fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
+             tps: [ast::ty_param]/~,
              f: fn(ext_ctxt, ser_tps_map,
-                   -@ast::expr, -@ast::expr) -> [@ast::stmt])
+                   -@ast::expr, -@ast::expr) -> [@ast::stmt]/~)
     -> @ast::item {
     let ext_cx = cx; // required for #ast
 
-    let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])});
-    let v_ty = cx.ty_path(span, [name], tp_types);
+    let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident]/~, []/~)});
+    let v_ty = cx.ty_path(span, [name]/~, tp_types);
 
     let tp_inputs =
         vec::map(tps, {|tp|
             {mode: ast::expl(ast::by_ref),
              ty: cx.ty_fn(span,
-                          [cx.ty_path(span, [tp.ident], [])],
+                          [cx.ty_path(span, [tp.ident]/~, []/~)]/~,
                           cx.ty_nil(span)),
              ident: @("__s" + *tp.ident),
              id: cx.next_id()}});
@@ -498,15 +501,15 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param],
     #debug["tp_inputs = %?", tp_inputs];
 
 
-    let ser_inputs: [ast::arg] =
+    let ser_inputs: [ast::arg]/~ =
         [{mode: ast::expl(ast::by_ref),
-          ty: cx.ty_path(span, [@"__S"], []),
+          ty: cx.ty_path(span, [@"__S"]/~, []/~),
           ident: @"__s",
           id: cx.next_id()},
          {mode: ast::expl(ast::by_ref),
           ty: v_ty,
           ident: @"__v",
-          id: cx.next_id()}]
+          id: cx.next_id()}]/~
         + tp_inputs;
 
     let tps_map = map::str_hash();
@@ -514,22 +517,23 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param],
         let arg_ident = arg.ident;
         tps_map.insert(
             *tp.ident,
-            fn@(v: @ast::expr) -> [@ast::stmt] {
+            fn@(v: @ast::expr) -> [@ast::stmt]/~ {
                 let f = cx.var_ref(span, arg_ident);
                 #debug["serializing type arg %s", *arg_ident];
-                [#ast(stmt){$(f)($(v));}]
+                [#ast(stmt){$(f)($(v));}]/~
             });
     }
 
     let ser_bnds = @[
         ast::bound_iface(cx.ty_path(span,
-                                    [@"std", @"serialization", @"serializer"],
-                                    []))];
+                                    [@"std", @"serialization",
+                                     @"serializer"]/~,
+                                    []/~))]/~;
 
-    let ser_tps: [ast::ty_param] =
+    let ser_tps: [ast::ty_param]/~ =
         [{ident: @"__S",
           id: cx.next_id(),
-          bounds: ser_bnds}] +
+          bounds: ser_bnds}]/~ +
         vec::map(tps) {|tp| cx.clone_ty_param(tp) };
 
     let ser_output: @ast::ty = @{id: cx.next_id(),
@@ -540,13 +544,13 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param],
                          f(cx, tps_map, #ast{ __s }, #ast{ __v }));
 
     @{ident: @("serialize_" + *name),
-      attrs: [],
+      attrs: []/~,
       id: cx.next_id(),
       node: ast::item_fn({inputs: ser_inputs,
                           output: ser_output,
                           purity: ast::impure_fn,
                           cf: ast::return_val,
-                          constraints: []},
+                          constraints: []/~},
                          ser_tps,
                          ser_blk),
       vis: ast::public,
@@ -571,7 +575,7 @@ fn deser_path(cx: ext_ctxt, tps: deser_tps_map, path: @ast::path,
         cx.lambda(cx.expr_blk(dv_expr))
     };
 
-    cx.expr(path.span, ast::expr_call(callee, [d] + ty_args, false))
+    cx.expr(path.span, ast::expr_call(callee, [d]/~ + ty_args, false))
 }
 
 fn deser_lambda(cx: ext_ctxt, tps: deser_tps_map, ty: @ast::ty,
@@ -688,30 +692,30 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
 }
 
 fn mk_deser_fn(cx: ext_ctxt, span: span,
-               name: ast::ident, tps: [ast::ty_param],
+               name: ast::ident, tps: [ast::ty_param]/~,
                f: fn(ext_ctxt, deser_tps_map, -@ast::expr) -> @ast::expr)
     -> @ast::item {
     let ext_cx = cx; // required for #ast
 
-    let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])});
-    let v_ty = cx.ty_path(span, [name], tp_types);
+    let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident]/~, []/~)});
+    let v_ty = cx.ty_path(span, [name]/~, tp_types);
 
     let tp_inputs =
         vec::map(tps, {|tp|
             {mode: ast::expl(ast::by_ref),
              ty: cx.ty_fn(span,
-                          [],
-                          cx.ty_path(span, [tp.ident], [])),
+                          []/~,
+                          cx.ty_path(span, [tp.ident]/~, []/~)),
              ident: @("__d" + *tp.ident),
              id: cx.next_id()}});
 
     #debug["tp_inputs = %?", tp_inputs];
 
-    let deser_inputs: [ast::arg] =
+    let deser_inputs: [ast::arg]/~ =
         [{mode: ast::expl(ast::by_ref),
-          ty: cx.ty_path(span, [@"__D"], []),
+          ty: cx.ty_path(span, [@"__D"]/~, []/~),
           ident: @"__d",
-          id: cx.next_id()}]
+          id: cx.next_id()}]/~
         + tp_inputs;
 
     let tps_map = map::str_hash();
@@ -728,46 +732,47 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
     let deser_bnds = @[
         ast::bound_iface(cx.ty_path(
             span,
-            [@"std", @"serialization", @"deserializer"],
-            []))];
+            [@"std", @"serialization", @"deserializer"]/~,
+            []/~))]/~;
 
-    let deser_tps: [ast::ty_param] =
+    let deser_tps: [ast::ty_param]/~ =
         [{ident: @"__D",
           id: cx.next_id(),
-          bounds: deser_bnds}] + vec::map(tps) {|tp|
+          bounds: deser_bnds}]/~ + vec::map(tps) {|tp|
         let cloned = cx.clone_ty_param(tp);
-        {bounds: @(*cloned.bounds + [ast::bound_copy]) with cloned}
+        {bounds: @(*cloned.bounds + [ast::bound_copy]/~) with cloned}
     };
 
     let deser_blk = cx.expr_blk(f(cx, tps_map, #ast(expr){__d}));
 
     @{ident: @("deserialize_" + *name),
-      attrs: [],
+      attrs: []/~,
       id: cx.next_id(),
       node: ast::item_fn({inputs: deser_inputs,
                           output: v_ty,
                           purity: ast::impure_fn,
                           cf: ast::return_val,
-                          constraints: []},
+                          constraints: []/~},
                          deser_tps,
                          deser_blk),
       vis: ast::public,
       span: span}
 }
 
-fn ty_fns(cx: ext_ctxt, name: ast::ident, ty: @ast::ty, tps: [ast::ty_param])
-    -> [@ast::item] {
+fn ty_fns(cx: ext_ctxt, name: ast::ident,
+          ty: @ast::ty, tps: [ast::ty_param]/~)
+    -> [@ast::item]/~ {
 
     let span = ty.span;
     [
         mk_ser_fn(cx, span, name, tps, {|a,b,c,d|ser_ty(a, b, ty, c, d)}),
         mk_deser_fn(cx, span, name, tps, {|a,b,c|deser_ty(a, b, ty, c)})
-    ]
+    ]/~
 }
 
 fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident,
-            e_span: span, variants: [ast::variant],
-            -s: @ast::expr, -v: @ast::expr) -> [@ast::stmt] {
+            e_span: span, variants: [ast::variant]/~,
+            -s: @ast::expr, -v: @ast::expr) -> [@ast::stmt]/~ {
     let ext_cx = cx;
     let arms = vec::from_fn(vec::len(variants)) {|vidx|
         let variant = variants[vidx];
@@ -781,9 +786,9 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident,
             // Generate pattern var(v1, v2, v3)
             {|pats|
                 if vec::is_empty(pats) {
-                    ast::pat_ident(cx.path(v_span, [v_name]), none)
+                    ast::pat_ident(cx.path(v_span, [v_name]/~), none)
                 } else {
-                    ast::pat_enum(cx.path(v_span, [v_name]), some(pats))
+                    ast::pat_enum(cx.path(v_span, [v_name]/~), some(pats))
                 }
             },
 
@@ -809,16 +814,16 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident,
                 }
             })
     };
-    let lam = cx.lambda(cx.blk(e_span, [cx.alt_stmt(arms, e_span, v)]));
+    let lam = cx.lambda(cx.blk(e_span, [cx.alt_stmt(arms, e_span, v)]/~));
     let e_name = cx.lit_str(e_span, e_name);
-    [#ast(stmt){ $(s).emit_enum($(e_name), $(lam)) }]
+    [#ast(stmt){ $(s).emit_enum($(e_name), $(lam)) }]/~
 }
 
 fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident,
-              e_span: span, variants: [ast::variant],
+              e_span: span, variants: [ast::variant]/~,
               -d: @ast::expr) -> @ast::expr {
     let ext_cx = cx;
-    let arms: [ast::arm] = vec::from_fn(vec::len(variants)) {|vidx|
+    let arms: [ast::arm]/~ = vec::from_fn(vec::len(variants)) {|vidx|
         let variant = variants[vidx];
         let v_span = variant.span;
         let v_name = variant.node.name;
@@ -843,7 +848,7 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident,
 
         {pats: [@{id: cx.next_id(),
                   node: ast::pat_lit(cx.lit_uint(v_span, vidx)),
-                  span: v_span}],
+                  span: v_span}]/~,
          guard: none,
          body: cx.expr_blk(body)}
     };
@@ -859,12 +864,12 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident,
 }
 
 fn enum_fns(cx: ext_ctxt, e_name: ast::ident, e_span: span,
-               variants: [ast::variant], tps: [ast::ty_param])
-    -> [@ast::item] {
+               variants: [ast::variant]/~, tps: [ast::ty_param]/~)
+    -> [@ast::item]/~ {
     [
         mk_ser_fn(cx, e_span, e_name, tps,
                   {|a,b,c,d|ser_enum(a, b, e_name, e_span, variants, c, d)}),
         mk_deser_fn(cx, e_span, e_name, tps,
                     {|a,b,c|deser_enum(a, b, e_name, e_span, variants, c)})
-    ]
+    ]/~
 }
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 6c93dbcd7ef..96a1efdfe7a 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -12,7 +12,7 @@ type macro_def = {ident: ast::ident, ext: syntax_extension};
 type macro_definer =
     fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> macro_def;
 type item_decorator =
-    fn@(ext_ctxt, span, ast::meta_item, [@ast::item]) -> [@ast::item];
+    fn@(ext_ctxt, span, ast::meta_item, [@ast::item]/~) -> [@ast::item]/~;
 
 type syntax_expander_tt = {expander: syntax_expander_tt_, span: option<span>};
 type syntax_expander_tt_ = fn@(ext_ctxt, span, ast::token_tree) -> @ast::expr;
@@ -72,7 +72,7 @@ iface ext_ctxt {
     fn backtrace() -> expn_info;
     fn mod_push(mod_name: ast::ident);
     fn mod_pop();
-    fn mod_path() -> [ast::ident];
+    fn mod_path() -> [ast::ident]/~;
     fn bt_push(ei: codemap::expn_info_);
     fn bt_pop();
     fn span_fatal(sp: span, msg: str) -> !;
@@ -88,7 +88,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
     type ctxt_repr = {parse_sess: parse::parse_sess,
                       cfg: ast::crate_cfg,
                       mut backtrace: expn_info,
-                      mut mod_path: [ast::ident]};
+                      mut mod_path: [ast::ident]/~};
     impl of ext_ctxt for ctxt_repr {
         fn codemap() -> codemap { self.parse_sess.cm }
         fn parse_sess() -> parse::parse_sess { self.parse_sess }
@@ -97,7 +97,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
         fn backtrace() -> expn_info { self.backtrace }
         fn mod_push(i: ast::ident) { vec::push(self.mod_path, i); }
         fn mod_pop() { vec::pop(self.mod_path); }
-        fn mod_path() -> [ast::ident] { ret self.mod_path; }
+        fn mod_path() -> [ast::ident]/~ { ret self.mod_path; }
         fn bt_push(ei: codemap::expn_info_) {
             alt ei {
               expanded_from({call_site: cs, callie: callie}) {
@@ -145,7 +145,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
         parse_sess: parse_sess,
         cfg: cfg,
         mut backtrace: none,
-        mut mod_path: []
+        mut mod_path: []/~
     };
     ret imp as ext_ctxt
 }
@@ -185,12 +185,12 @@ fn make_new_expr(cx: ext_ctxt, sp: codemap::span, expr: ast::expr_) ->
 }
 
 fn get_mac_args_no_max(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                       min: uint, name: str) -> [@ast::expr] {
+                       min: uint, name: str) -> [@ast::expr]/~ {
     ret get_mac_args(cx, sp, arg, min, none, name);
 }
 
 fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                min: uint, max: option<uint>, name: str) -> [@ast::expr] {
+                min: uint, max: option<uint>, name: str) -> [@ast::expr]/~ {
     alt arg {
       some(expr) {
         alt expr.node {
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 4e0c6889092..2e5fe63eb5a 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -28,35 +28,35 @@ fn mk_unary(cx: ext_ctxt, sp: span, op: ast::unop, e: @ast::expr)
     let expr = ast::expr_unary(op, e);
     ret @{id: cx.next_id(), node: expr, span: sp};
 }
-fn mk_path(cx: ext_ctxt, sp: span, idents: [ast::ident]) ->
+fn mk_path(cx: ext_ctxt, sp: span, idents: [ast::ident]/~) ->
     @ast::expr {
     let path = @{span: sp, global: false, idents: idents,
-                 rp: none, types: []};
+                 rp: none, types: []/~};
     let pathexpr = ast::expr_path(path);
     ret @{id: cx.next_id(), node: pathexpr, span: sp};
 }
 fn mk_access_(cx: ext_ctxt, sp: span, p: @ast::expr, m: ast::ident)
     -> @ast::expr {
-    let expr = ast::expr_field(p, m, []);
+    let expr = ast::expr_field(p, m, []/~);
     ret @{id: cx.next_id(), node: expr, span: sp};
 }
-fn mk_access(cx: ext_ctxt, sp: span, p: [ast::ident], m: ast::ident)
+fn mk_access(cx: ext_ctxt, sp: span, p: [ast::ident]/~, m: ast::ident)
     -> @ast::expr {
     let pathexpr = mk_path(cx, sp, p);
     ret mk_access_(cx, sp, pathexpr, m);
 }
 fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr,
-            args: [@ast::expr]) -> @ast::expr {
+            args: [@ast::expr]/~) -> @ast::expr {
     let callexpr = ast::expr_call(fn_expr, args, false);
     ret @{id: cx.next_id(), node: callexpr, span: sp};
 }
-fn mk_call(cx: ext_ctxt, sp: span, fn_path: [ast::ident],
-             args: [@ast::expr]) -> @ast::expr {
+fn mk_call(cx: ext_ctxt, sp: span, fn_path: [ast::ident]/~,
+             args: [@ast::expr]/~) -> @ast::expr {
     let pathexpr = mk_path(cx, sp, fn_path);
     ret mk_call_(cx, sp, pathexpr, args);
 }
 // e = expr, t = type
-fn mk_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]) ->
+fn mk_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]/~) ->
    @ast::expr {
     let vecexpr = ast::expr_vec(exprs, ast::m_imm);
     ret @{id: cx.next_id(), node: vecexpr, span: sp};
@@ -72,15 +72,15 @@ fn mk_uniq_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]/~) ->
 }
 
 fn mk_rec_e(cx: ext_ctxt, sp: span,
-            fields: [{ident: ast::ident, ex: @ast::expr}]) ->
+            fields: [{ident: ast::ident, ex: @ast::expr}]/~) ->
     @ast::expr {
-    let mut astfields: [ast::field] = [];
+    let mut astfields: [ast::field]/~ = []/~;
     for fields.each {|field|
         let ident = field.ident;
         let val = field.ex;
         let astfield =
             {node: {mutbl: ast::m_imm, ident: ident, expr: val}, span: sp};
-        astfields += [astfield];
+        astfields += [astfield]/~;
     }
     let recexpr = ast::expr_rec(astfields, option::none::<@ast::expr>);
     ret @{id: cx.next_id(), node: recexpr, span: sp};
diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs
index faf8e1a0868..9f445218007 100644
--- a/src/libsyntax/ext/concat_idents.rs
+++ b/src/libsyntax/ext/concat_idents.rs
@@ -9,7 +9,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
     }
 
     ret @{id: cx.next_id(),
-          node: ast::expr_path(@{span: sp, global: false, idents: [@res],
-                                 rp: none, types: []}),
+          node: ast::expr_path(@{span: sp, global: false, idents: [@res]/~,
+                                 rp: none, types: []/~}),
           span: sp};
 }
diff --git a/src/libsyntax/ext/earley_parser.rs b/src/libsyntax/ext/earley_parser.rs
index b1a2524ca35..223cca25694 100644
--- a/src/libsyntax/ext/earley_parser.rs
+++ b/src/libsyntax/ext/earley_parser.rs
@@ -32,11 +32,11 @@ fn is_some(&&mpu: matcher_pos_up) -> bool {
 }
 
 type matcher_pos = ~{
-    elts: [ast::matcher], // maybe should be /& ? Need to understand regions.
+    elts: [ast::matcher]/~, // maybe should be /&? Need to understand regions.
     sep: option<token>,
     mut idx: uint,
     mut up: matcher_pos_up, // mutable for swapping only
-    matches: [dvec<@arb_depth>]
+    matches: [dvec<@arb_depth>]/~
 };
 
 fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos {
@@ -55,26 +55,26 @@ fn count_names(ms: [matcher]/&) -> uint {
         }})
 }
 
-fn new_matcher_pos(ms: [matcher], sep: option<token>) -> matcher_pos {
+fn new_matcher_pos(ms: [matcher]/~, sep: option<token>) -> matcher_pos {
     ~{elts: ms, sep: sep, mut idx: 0u, mut up: matcher_pos_up(none),
       matches: copy vec::from_fn(count_names(ms), {|_i| dvec::dvec()}) }
 }
 
 /* logically, an arb_depth should contain only one kind of nonterminal */
-enum arb_depth { leaf(whole_nt), seq([@arb_depth]) }
+enum arb_depth { leaf(whole_nt), seq([@arb_depth]/~) }
 
 type earley_item = matcher_pos;
 
 
-fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: [matcher])
-    -> [@arb_depth] {
-    let mut cur_eis = [];
+fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: [matcher]/~)
+    -> [@arb_depth]/~ {
+    let mut cur_eis = []/~;
     vec::push(cur_eis, new_matcher_pos(ms, none));
 
     loop {
-        let mut bb_eis = []; // black-box parsed by parser.rs
-        let mut next_eis = []; // or proceed normally
-        let mut eof_eis = [];
+        let mut bb_eis = []/~; // black-box parsed by parser.rs
+        let mut next_eis = []/~; // or proceed normally
+        let mut eof_eis = []/~;
 
         let {tok: tok, sp: _} = rdr.peek();
 
@@ -218,12 +218,12 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: [matcher])
 
 fn parse_nt(p: parser, name: str) -> whole_nt {
     alt name {
-      "item" { alt p.parse_item([], ast::public) {
+      "item" { alt p.parse_item([]/~, ast::public) {
         some(i) { token::w_item(i) }
         none { p.fatal("expected an item keyword") }
       }}
       "block" { token::w_block(p.parse_block()) }
-      "stmt" { token::w_stmt(p.parse_stmt([])) }
+      "stmt" { token::w_stmt(p.parse_stmt([]/~)) }
       "pat" { token::w_pat(p.parse_pat()) }
       "expr" { token::w_expr(p.parse_expr()) }
       "ty" { token::w_ty(p.parse_ty(false /* no need to disambiguate*/)) }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index d7bb7835822..ca5d7f6bab3 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -45,7 +45,7 @@ fn expand_expr(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
                   some(macro_defining(ext)) {
                     let named_extension = ext(cx, pth.span, args, body);
                     exts.insert(*named_extension.ident, named_extension.ext);
-                    (ast::expr_rec([], none), s)
+                    (ast::expr_rec([]/~, none), s)
                   }
                   some(normal_tt(_)) {
                     cx.span_fatal(pth.span,
@@ -101,7 +101,7 @@ fn expand_mod_items(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
     // decorated with "item decorators", then use that function to transform
     // the item into a new set of items.
     let new_items = vec::flat_map(module.items) {|item|
-        vec::foldr(item.attrs, [item]) {|attr, items|
+        vec::foldr(item.attrs, [item]/~) {|attr, items|
             let mname = alt attr.node.value.node {
               ast::meta_word(n) { n }
               ast::meta_name_value(n, _) { n }
diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs
index 24a5aed7d28..43408dec739 100644
--- a/src/libsyntax/ext/fmt.rs
+++ b/src/libsyntax/ext/fmt.rs
@@ -34,10 +34,11 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
 // probably be factored out in common with other code that builds
 // expressions.  Also: Cleanup the naming of these functions.
 // NOTE: Moved many of the common ones to build.rs --kevina
-fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
+fn pieces_to_expr(cx: ext_ctxt, sp: span,
+                  pieces: [piece]/~, args: [@ast::expr]/~)
    -> @ast::expr {
-    fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> [ast::ident] {
-        ret [@"extfmt", @"rt", ident];
+    fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> [ast::ident]/~ {
+        ret [@"extfmt", @"rt", ident]/~;
     }
     fn make_rt_path_expr(cx: ext_ctxt, sp: span,
                          ident: ast::ident) -> @ast::expr {
@@ -48,8 +49,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
     // which tells the RT::conv* functions how to perform the conversion
 
     fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr {
-        fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]) -> @ast::expr {
-            let mut flagexprs: [@ast::expr] = [];
+        fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]/~) -> @ast::expr {
+            let mut flagexprs: [@ast::expr]/~ = []/~;
             for flags.each {|f|
                 let mut fstr;
                 alt f {
@@ -59,7 +60,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
                   flag_sign_always { fstr = "flag_sign_always"; }
                   flag_alternate { fstr = "flag_alternate"; }
                 }
-                flagexprs += [make_rt_path_expr(cx, sp, @fstr)];
+                flagexprs += [make_rt_path_expr(cx, sp, @fstr)]/~;
             }
             ret mk_uniq_vec_e(cx, sp, flagexprs);
         }
@@ -71,7 +72,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
               count_is(c) {
                 let count_lit = mk_int(cx, sp, c);
                 let count_is_path = make_path_vec(cx, @"count_is");
-                let count_is_args = [count_lit];
+                let count_is_args = [count_lit]/~;
                 ret mk_call(cx, sp, count_is_path, count_is_args);
               }
               _ { cx.span_unimpl(sp, "unimplemented #fmt conversion"); }
@@ -99,7 +100,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
                          [{ident: @"flags", ex: flags_expr},
                           {ident: @"width", ex: width_expr},
                           {ident: @"precision", ex: precision_expr},
-                          {ident: @"ty", ex: ty_expr}]);
+                          {ident: @"ty", ex: ty_expr}]/~);
         }
         let rt_conv_flags = make_flags(cx, sp, cnv.flags);
         let rt_conv_width = make_count(cx, sp, cnv.width);
@@ -113,7 +114,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
         let fname = "conv_" + conv_type;
         let path = make_path_vec(cx, @fname);
         let cnv_expr = make_rt_conv_expr(cx, sp, cnv);
-        let args = [cnv_expr, arg];
+        let args = [cnv_expr, arg]/~;
         ret mk_call(cx, arg.span, path, args);
     }
     fn make_new_conv(cx: ext_ctxt, sp: span, cnv: conv, arg: @ast::expr) ->
diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs
index 5ccbb143b97..50fac765483 100644
--- a/src/libsyntax/ext/log_syntax.rs
+++ b/src/libsyntax/ext/log_syntax.rs
@@ -11,5 +11,6 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
     );
 
     //trivial expression
-    ret @{id: cx.next_id(), node: ast::expr_rec([], option::none), span: sp};
+    ret @{id: cx.next_id(), node: ast::expr_rec([]/~, option::none),
+          span: sp};
 }
diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs
index caef1841faf..648532d3024 100644
--- a/src/libsyntax/ext/qquote.rs
+++ b/src/libsyntax/ext/qquote.rs
@@ -35,7 +35,7 @@ impl of qq_helper for @ast::crate {
     fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_crate(*self, cx, v);}
     fn extract_mac() -> option<ast::mac_> {fail}
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_crate"])
+        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_crate"]/~)
     }
     fn get_fold_fn() -> str {"fold_crate"}
 }
@@ -49,7 +49,7 @@ impl of qq_helper for @ast::expr {
         }
     }
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_expr"])
+        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_expr"]/~)
     }
     fn get_fold_fn() -> str {"fold_expr"}
 }
@@ -63,7 +63,7 @@ impl of qq_helper for @ast::ty {
         }
     }
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_ty"])
+        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_ty"]/~)
     }
     fn get_fold_fn() -> str {"fold_ty"}
 }
@@ -72,7 +72,7 @@ impl of qq_helper for @ast::item {
     fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_item(self, cx, v);}
     fn extract_mac() -> option<ast::mac_> {fail}
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_item"])
+        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_item"]/~)
     }
     fn get_fold_fn() -> str {"fold_item"}
 }
@@ -81,7 +81,7 @@ impl of qq_helper for @ast::stmt {
     fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_stmt(self, cx, v);}
     fn extract_mac() -> option<ast::mac_> {fail}
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_stmt"])
+        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_stmt"]/~)
     }
     fn get_fold_fn() -> str {"fold_stmt"}
 }
@@ -90,7 +90,7 @@ impl of qq_helper for @ast::pat {
     fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);}
     fn extract_mac() -> option<ast::mac_> {fail}
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_pat"])
+        mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_pat"]/~)
     }
     fn get_fold_fn() -> str {"fold_pat"}
 }
@@ -133,12 +133,12 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
 {
     let mut what = "expr";
     option::iter(arg) {|arg|
-        let args: [@ast::expr] =
+        let args: [@ast::expr]/~ =
             alt arg.node {
               ast::expr_vec(elts, _) { elts }
               _ {
                 ecx.span_fatal
-                    (_sp, "#ast requires arguments of the form `[...]`.")
+                    (_sp, "#ast requires arguments of the form `[...]/~`.")
               }
             };
         if vec::len::<@ast::expr>(args) != 1u {
@@ -163,14 +163,14 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
     };
 }
 
-fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod([]) }
+fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod([]/~) }
 fn parse_ty(p: parser) -> @ast::ty { p.parse_ty(false) }
-fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt([]) }
+fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt([]/~) }
 fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() }
 fn parse_pat(p: parser) -> @ast::pat { p.parse_pat() }
 
 fn parse_item(p: parser) -> @ast::item {
-    alt p.parse_item([], ast::public) {
+    alt p.parse_item([]/~, ast::public) {
       some(item) { item }
       none       { fail "parse_item: parsing an item failed"; }
     }
@@ -230,47 +230,48 @@ fn finish<T: qq_helper>
     let cx = ecx;
 
     let cfg_call = {||
-        mk_call_(cx, sp, mk_access(cx, sp, [@"ext_cx"], @"cfg"), [])
+        mk_call_(cx, sp, mk_access(cx, sp, [@"ext_cx"]/~, @"cfg"), []/~)
     };
 
     let parse_sess_call = {||
-        mk_call_(cx, sp, mk_access(cx, sp, [@"ext_cx"], @"parse_sess"), [])
+        mk_call_(cx, sp,
+                 mk_access(cx, sp, [@"ext_cx"]/~, @"parse_sess"), []/~)
     };
 
     let pcall = mk_call(cx,sp,
                        [@"syntax", @"parse", @"parser",
-                        @"parse_from_source_str"],
+                        @"parse_from_source_str"]/~,
                        [node.mk_parse_fn(cx,sp),
                         mk_str(cx,sp, fname),
                         mk_call(cx,sp,
                                 [@"syntax",@"ext",
-                                 @"qquote", @"mk_file_substr"],
+                                 @"qquote", @"mk_file_substr"]/~,
                                 [mk_str(cx,sp, loc.file.name),
                                  mk_uint(cx,sp, loc.line),
-                                 mk_uint(cx,sp, loc.col)]),
+                                 mk_uint(cx,sp, loc.col)]/~),
                         mk_unary(cx,sp, ast::box(ast::m_imm),
                                  mk_str(cx,sp, str2)),
                         cfg_call(),
-                        parse_sess_call()]
+                        parse_sess_call()]/~
                       );
     let mut rcall = pcall;
     if (g_len > 0u) {
         rcall = mk_call(cx,sp,
-                        [@"syntax", @"ext", @"qquote", @"replace"],
+                        [@"syntax", @"ext", @"qquote", @"replace"]/~,
                         [pcall,
                          mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec {|g|
                              mk_call(cx,sp,
                                      [@"syntax", @"ext",
-                                      @"qquote", @g.constr],
-                                     [g.e])}),
+                                      @"qquote", @g.constr]/~,
+                                     [g.e]/~)}),
                          mk_path(cx,sp,
                                  [@"syntax", @"ext", @"qquote",
-                                  @node.get_fold_fn()])]);
+                                  @node.get_fold_fn()]/~)]/~);
     }
     ret rcall;
 }
 
-fn replace<T>(node: T, repls: [fragment], ff: fn (ast_fold, T) -> T)
+fn replace<T>(node: T, repls: [fragment]/~, ff: fn (ast_fold, T) -> T)
     -> T
 {
     let aft = default_ast_fold();
@@ -290,7 +291,7 @@ fn fold_item(f: ast_fold, &&n: @ast::item) -> @ast::item {f.fold_item(n)}
 fn fold_stmt(f: ast_fold, &&n: @ast::stmt) -> @ast::stmt {f.fold_stmt(n)}
 fn fold_pat(f: ast_fold, &&n: @ast::pat) -> @ast::pat {f.fold_pat(n)}
 
-fn replace_expr(repls: [fragment],
+fn replace_expr(repls: [fragment]/~,
                 e: ast::expr_, s: span, fld: ast_fold,
                 orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
     -> (ast::expr_, span)
@@ -304,7 +305,7 @@ fn replace_expr(repls: [fragment],
     }
 }
 
-fn replace_ty(repls: [fragment],
+fn replace_ty(repls: [fragment]/~,
                 e: ast::ty_, s: span, fld: ast_fold,
                 orig: fn@(ast::ty_, span, ast_fold)->(ast::ty_, span))
     -> (ast::ty_, span)
diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs
index 4dad13dc06a..c29b2246993 100644
--- a/src/libsyntax/ext/simplext.rs
+++ b/src/libsyntax/ext/simplext.rs
@@ -22,7 +22,7 @@ fn path_to_ident(pth: @path) -> option<ident> {
 type clause = {params: binders, body: @expr};
 
 /* logically, an arb_depth should contain only one kind of matchable */
-enum arb_depth<T> { leaf(T), seq(@[arb_depth<T>], span), }
+enum arb_depth<T> { leaf(T), seq(@[arb_depth<T>]/~, span), }
 
 
 enum matchable {
@@ -70,8 +70,8 @@ fn match_error(cx: ext_ctxt, m: matchable, expected: str) -> ! {
 type match_result = option<arb_depth<matchable>>;
 type selector = fn@(matchable) -> match_result;
 
-fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
-   {pre: [@expr], rep: option<@expr>, post: [@expr]} {
+fn elts_to_ell(cx: ext_ctxt, elts: [@expr]/~) ->
+   {pre: [@expr]/~, rep: option<@expr>, post: [@expr]/~} {
     let mut idx: uint = 0u;
     let mut res = none;
     for elts.each {|elt|
@@ -96,15 +96,15 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
     }
     ret alt res {
           some(val) { val }
-          none { {pre: elts, rep: none, post: []} }
+          none { {pre: elts, rep: none, post: []/~} }
         }
 }
 
-fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]) ->
-   option<[U]> {
-    let mut res = [];
+fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]/~) ->
+   option<[U]/~> {
+    let mut res = []/~;
     for v.each {|elem|
-        alt f(elem) { none { ret none; } some(fv) { res += [fv]; } }
+        alt f(elem) { none { ret none; } some(fv) { res += [fv]/~; } }
     }
     ret some(res);
 }
@@ -182,7 +182,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
 /* use the bindings on the body to generate the expanded code */
 
 fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
-    let idx_path: @mut [uint] = @mut [];
+    let idx_path: @mut [uint]/~ = @mut []/~;
     fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { ret cx.next_id(); }
     fn new_span(cx: ext_ctxt, sp: span) -> span {
         /* this discards information in the case of macro-defining macros */
@@ -214,7 +214,7 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
 
 
 /* helper: descend into a matcher */
-fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]) ->
+fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]/~) ->
    arb_depth<matchable> {
     let mut res: arb_depth<matchable> = m;
     for vec::each(*idx_path) {|idx|
@@ -227,7 +227,7 @@ fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]) ->
 }
 
 fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>,
-                    idx_path: @mut [uint]) -> option<matchable> {
+                    idx_path: @mut [uint]/~) -> option<matchable> {
     alt mmaybe {
       none { ret none }
       some(m) {
@@ -264,8 +264,9 @@ fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
 
 
 /* handle sequences (anywhere in the AST) of exprs, either real or ...ed */
-fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
-                    recur: fn@(&&@expr) -> @expr, exprs: [@expr]) -> [@expr] {
+fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
+                    recur: fn@(&&@expr) -> @expr,
+                    exprs: [@expr]/~) -> [@expr]/~ {
     alt elts_to_ell(cx, exprs) {
       {pre: pre, rep: repeat_me_maybe, post: post} {
         let mut res = vec::map(pre, recur);
@@ -308,8 +309,8 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
                 /* Whew, we now know how how many times to repeat */
                 let mut idx: uint = 0u;
                 while idx < rc {
-                    *idx_path += [idx];
-                    res += [recur(repeat_me)]; // whew!
+                    *idx_path += [idx]/~;
+                    res += [recur(repeat_me)]/~; // whew!
                     vec::pop(*idx_path);
                     idx += 1u;
                 }
@@ -326,7 +327,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
 
 
 // substitute, in a position that's required to be an ident
-fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
+fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
                     &&i: ident, _fld: ast_fold) -> ident {
     ret alt follow_for_trans(cx, b.find(i), idx_path) {
           some(match_ident(a_id)) { a_id.node }
@@ -336,14 +337,14 @@ fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
 }
 
 
-fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
+fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
                    p: path, _fld: ast_fold) -> path {
     // Don't substitute into qualified names.
     if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { ret p; }
     alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
       some(match_ident(id)) {
-        {span: id.span, global: false, idents: [id.node],
-         rp: none, types: []}
+        {span: id.span, global: false, idents: [id.node]/~,
+         rp: none, types: []/~}
       }
       some(match_path(a_pth)) { *a_pth }
       some(m) { match_error(cx, m, "a path") }
@@ -352,7 +353,7 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
 }
 
 
-fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
+fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
                    e: ast::expr_, s: span, fld: ast_fold,
                    orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
     -> (ast::expr_, span)
@@ -367,9 +368,9 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
               some(match_ident(id)) {
                 (expr_path(@{span: id.span,
                              global: false,
-                             idents: [id.node],
+                             idents: [id.node]/~,
                              rp: none,
-                             types: []}), id.span)
+                             types: []/~}), id.span)
               }
               some(match_path(a_pth)) { (expr_path(a_pth), s) }
               some(match_expr(a_exp)) { (a_exp.node, a_exp.span) }
@@ -381,7 +382,7 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
         }
 }
 
-fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
+fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
                    t: ast::ty_, s: span, fld: ast_fold,
                    orig: fn@(ast::ty_, span, ast_fold) -> (ast::ty_, span))
     -> (ast::ty_, span)
@@ -407,7 +408,7 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
 /* for parsing reasons, syntax variables bound to blocks must be used like
 `{v}` */
 
-fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
+fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
                     blk: blk_, s: span, fld: ast_fold,
                     orig: fn@(blk_, span, ast_fold) -> (blk_, span))
     -> (blk_, span)
@@ -458,7 +459,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
                 }
               }
               {pre: pre, rep: none, post: post} {
-                if post != [] {
+                if post != []/~ {
                     cx.bug("elts_to_ell provided an invalid result");
                 }
                 p_t_s_r_length(cx, vec::len(pre), false, s, b);
@@ -606,10 +607,10 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
               match_expr(e) {
                 alt e.node {
                   expr_vec(arg_elts, _) {
-                    let mut elts = [];
+                    let mut elts = []/~;
                     let mut idx = offset;
                     while idx < vec::len(arg_elts) {
-                        elts += [leaf(match_expr(arg_elts[idx]))];
+                        vec::push(elts, leaf(match_expr(arg_elts[idx])));
                         idx += 1u;
                     }
 
@@ -651,7 +652,7 @@ fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector,
         compose_sels(s, {|x|len_select(cx, x, at_least, len)}));
 }
 
-fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr], _repeat_after: bool,
+fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr]/~, _repeat_after: bool,
                          s: selector, b: binders) {
     let mut idx: uint = 0u;
     while idx < vec::len(elts) {
@@ -679,14 +680,14 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
     let args = get_mac_args_no_max(cx, sp, arg, 0u, "macro");
 
     let mut macro_name: option<@str> = none;
-    let mut clauses: [@clause] = [];
+    let mut clauses: [@clause]/~ = []/~;
     for args.each {|arg|
         alt arg.node {
           expr_vec(elts, mutbl) {
             if vec::len(elts) != 2u {
                 cx.span_fatal((*arg).span,
                               "extension clause must consist of [" +
-                                  "macro invocation, expansion body]");
+                                  "macro invocation, expansion body]/~");
             }
 
 
@@ -719,7 +720,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                     };
                     clauses +=
                         [@{params: pattern_to_selectors(cx, arg),
-                           body: elts[1u]}];
+                           body: elts[1u]}]/~;
 
                     // FIXME (#2251): check duplicates (or just simplify
                     // the macro arg situation)
@@ -739,7 +740,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
           }
           _ {
             cx.span_fatal((*arg).span,
-                          "extension must be [clause, " + " ...]");
+                          "extension must be [clause, " + " ...]/~");
           }
         }
     }
@@ -759,7 +760,8 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
          ext: normal({expander: ext, span: some(option::get(arg).span)})};
 
     fn generic_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                         _body: ast::mac_body, clauses: [@clause]) -> @expr {
+                         _body: ast::mac_body,
+                         clauses: [@clause]/~) -> @expr {
         let arg = alt arg {
           some(arg) { arg }
           none { cx.span_fatal(sp, "macro must have arguments")}
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 492210a55b8..d1ee7a16a47 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -41,7 +41,7 @@ iface ast_fold {
     fn fold_ident(&&ident) -> ident;
     fn fold_path(&&@path) -> @path;
     fn fold_local(&&@local) -> @local;
-    fn map_exprs(fn@(&&@expr) -> @expr, [@expr]) -> [@expr];
+    fn map_exprs(fn@(&&@expr) -> @expr, [@expr]/~) -> [@expr]/~;
     fn new_id(node_id) -> node_id;
     fn new_span(span) -> span;
 }
@@ -75,7 +75,7 @@ type ast_fold_precursor = @{
     fold_ident: fn@(&&ident, ast_fold) -> ident,
     fold_path: fn@(path, ast_fold) -> path,
     fold_local: fn@(local_, span, ast_fold) -> (local_, span),
-    map_exprs: fn@(fn@(&&@expr) -> @expr, [@expr]) -> [@expr],
+    map_exprs: fn@(fn@(&&@expr) -> @expr, [@expr]/~) -> [@expr]/~,
     new_id: fn@(node_id) -> node_id,
     new_span: fn@(span) -> span};
 
@@ -151,7 +151,7 @@ fn fold_ty_param(tp: ty_param, fld: ast_fold) -> ty_param {
      bounds: @vec::map(*tp.bounds, {|x|fold_ty_param_bound(x, fld)})}
 }
 
-fn fold_ty_params(tps: [ty_param], fld: ast_fold) -> [ty_param] {
+fn fold_ty_params(tps: [ty_param]/~, fld: ast_fold) -> [ty_param]/~ {
     vec::map(tps, {|x|fold_ty_param(x, fld)})
 }
 
@@ -335,10 +335,11 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
                        {|pats| vec::map(pats, fld.fold_pat)})
           }
           pat_rec(fields, etc) {
-            let mut fs = [];
+            let mut fs = []/~;
             for fields.each {|f|
-                fs += [{ident: /* FIXME (#2543) */ copy f.ident,
-                        pat: fld.fold_pat(f.pat)}];
+                vec::push(fs,
+                          {ident: /* FIXME (#2543) */ copy f.ident,
+                           pat: fld.fold_pat(f.pat)});
             }
             pat_rec(fs, etc)
           }
@@ -570,7 +571,7 @@ fn noop_fold_local(l: local_, fld: ast_fold) -> local_ {
 
 /* temporarily eta-expand because of a compiler bug with using `fn<T>` as a
    value */
-fn noop_map_exprs(f: fn@(&&@expr) -> @expr, es: [@expr]) -> [@expr] {
+fn noop_map_exprs(f: fn@(&&@expr) -> @expr, es: [@expr]/~) -> [@expr]/~ {
     ret vec::map(es, f);
 }
 
@@ -717,7 +718,7 @@ impl of ast_fold for ast_fold_precursor {
         let (n, s) = self.fold_local(x.node, x.span, self as ast_fold);
         ret @{node: n, span: self.new_span(s)};
     }
-    fn map_exprs(f: fn@(&&@expr) -> @expr, e: [@expr]) -> [@expr] {
+    fn map_exprs(f: fn@(&&@expr) -> @expr, e: [@expr]/~) -> [@expr]/~ {
         self.map_exprs(f, e)
     }
     fn new_id(node_id: ast::node_id) -> node_id {
diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs
index d062f4bde6d..2e309f2bd14 100644
--- a/src/libsyntax/parse.rs
+++ b/src/libsyntax/parse.rs
@@ -119,7 +119,8 @@ fn parse_expr_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
 }
 
 fn parse_item_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
-                              +attrs: [ast::attribute], vis: ast::visibility,
+                              +attrs: [ast::attribute]/~,
+                              vis: ast::visibility,
                               sess: parse_sess) -> option<@ast::item> {
     let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
                                                   codemap::fss_none, source);
@@ -197,7 +198,7 @@ fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str,
 }
 
 fn new_parser_from_tt(sess: parse_sess, cfg: ast::crate_cfg,
-                      tt: [ast::token_tree]) -> parser {
+                      tt: [ast::token_tree]/~) -> parser {
     let trdr = lexer::new_tt_reader(sess.span_diagnostic, sess.interner, tt);
     ret parser(sess, cfg, trdr as reader, parser::SOURCE_FILE)
 }
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index dad180847ee..4d78bcdc0a9 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -7,11 +7,11 @@ export parser_attr;
 
 // A type to distingush between the parsing of item attributes or syntax
 // extensions, which both begin with token.POUND
-type attr_or_ext = option<either<[ast::attribute], @ast::expr>>;
+type attr_or_ext = option<either<[ast::attribute]/~, @ast::expr>>;
 
 impl parser_attr for parser {
 
-    fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute])
+    fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute]/~)
         -> attr_or_ext
     {
         let expect_item_next = vec::is_not_empty(first_item_attrs);
@@ -21,7 +21,8 @@ impl parser_attr for parser {
                 self.bump();
                 let first_attr =
                     self.parse_attribute_naked(ast::attr_outer, lo);
-                ret some(left([first_attr] + self.parse_outer_attributes()));
+                ret some(left([first_attr]/~ +
+                              self.parse_outer_attributes()));
             } else if !(self.look_ahead(1u) == token::LT
                         || self.look_ahead(1u) == token::LBRACKET
                         || self.look_ahead(1u) == token::POUND
@@ -33,11 +34,11 @@ impl parser_attr for parser {
     }
 
     // Parse attributes that appear before an item
-    fn parse_outer_attributes() -> [ast::attribute] {
-        let mut attrs: [ast::attribute] = [];
+    fn parse_outer_attributes() -> [ast::attribute]/~ {
+        let mut attrs: [ast::attribute]/~ = []/~;
         while self.token == token::POUND
             && self.look_ahead(1u) == token::LBRACKET {
-            attrs += [self.parse_attribute(ast::attr_outer)];
+            attrs += [self.parse_attribute(ast::attr_outer)]/~;
         }
         ret attrs;
     }
@@ -64,9 +65,9 @@ impl parser_attr for parser {
     // is an inner attribute of the containing item or an outer attribute of
     // the first contained item until we see the semi).
     fn parse_inner_attrs_and_next() ->
-        {inner: [ast::attribute], next: [ast::attribute]} {
-        let mut inner_attrs: [ast::attribute] = [];
-        let mut next_outer_attrs: [ast::attribute] = [];
+        {inner: [ast::attribute]/~, next: [ast::attribute]/~} {
+        let mut inner_attrs: [ast::attribute]/~ = []/~;
+        let mut next_outer_attrs: [ast::attribute]/~ = []/~;
         while self.token == token::POUND {
             if self.look_ahead(1u) != token::LBRACKET {
                 // This is an extension
@@ -75,13 +76,13 @@ impl parser_attr for parser {
             let attr = self.parse_attribute(ast::attr_inner);
             if self.token == token::SEMI {
                 self.bump();
-                inner_attrs += [attr];
+                inner_attrs += [attr]/~;
             } else {
                 // It's not really an inner attribute
                 let outer_attr =
                     spanned(attr.span.lo, attr.span.hi,
                             {style: ast::attr_outer, value: attr.node.value});
-                next_outer_attrs += [outer_attr];
+                next_outer_attrs += [outer_attr]/~;
                 break;
             }
         }
@@ -110,15 +111,15 @@ impl parser_attr for parser {
         }
     }
 
-    fn parse_meta_seq() -> [@ast::meta_item] {
+    fn parse_meta_seq() -> [@ast::meta_item]/~ {
         ret self.parse_seq(token::LPAREN, token::RPAREN,
                            seq_sep_trailing_disallowed(token::COMMA),
                            {|p| p.parse_meta_item()}).node;
     }
 
-    fn parse_optional_meta() -> [@ast::meta_item] {
+    fn parse_optional_meta() -> [@ast::meta_item]/~ {
         alt self.token { token::LPAREN { ret self.parse_meta_seq(); }
-                         _ { ret []; } }
+                         _ { ret []/~; } }
     }
 }
 
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index 54d14f2eaf4..2f10a30bd55 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -16,7 +16,7 @@ enum cmnt_style {
     blank_line, // Just a manual blank line "\n\n", for layout
 }
 
-type cmnt = {style: cmnt_style, lines: [str], pos: uint};
+type cmnt = {style: cmnt_style, lines: [str]/~, pos: uint};
 
 fn read_to_eol(rdr: string_reader) -> str {
     let mut val = "";
@@ -41,14 +41,14 @@ fn consume_non_eol_whitespace(rdr: string_reader) {
     }
 }
 
-fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]) {
+fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]/~) {
     #debug(">>> blank-line comment");
-    let v: [str] = [];
-    comments += [{style: blank_line, lines: v, pos: rdr.chpos}];
+    let v: [str]/~ = []/~;
+    comments += [{style: blank_line, lines: v, pos: rdr.chpos}]/~;
 }
 
 fn consume_whitespace_counting_blank_lines(rdr: string_reader,
-                                           &comments: [cmnt]) {
+                                           &comments: [cmnt]/~) {
     while is_whitespace(rdr.curr) && !is_eof(rdr) {
         if rdr.col == 0u && rdr.curr == '\n' {
             push_blank_line_comment(rdr, comments);
@@ -62,18 +62,18 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt {
     let p = rdr.chpos;
     #debug("<<< shebang comment");
     ret {style: if code_to_the_left { trailing } else { isolated },
-         lines: [read_one_line_comment(rdr)],
+         lines: [read_one_line_comment(rdr)]/~,
          pos: p};
 }
 
 fn read_line_comments(rdr: string_reader, code_to_the_left: bool) -> cmnt {
     #debug(">>> line comments");
     let p = rdr.chpos;
-    let mut lines: [str] = [];
+    let mut lines: [str]/~ = []/~;
     while rdr.curr == '/' && nextch(rdr) == '/' {
         let line = read_one_line_comment(rdr);
         log(debug, line);
-        lines += [line];
+        lines += [line]/~;
         consume_non_eol_whitespace(rdr);
     }
     #debug("<<< line comments");
@@ -88,7 +88,7 @@ fn all_whitespace(s: str, begin: uint, end: uint) -> bool {
     ret true;
 }
 
-fn trim_whitespace_prefix_and_push_line(&lines: [str],
+fn trim_whitespace_prefix_and_push_line(&lines: [str]/~,
                                         s: str, col: uint) unsafe {
     let mut s1;
     let len = str::len(s);
@@ -98,13 +98,13 @@ fn trim_whitespace_prefix_and_push_line(&lines: [str],
         } else { s1 = ""; }
     } else { s1 = s; }
     log(debug, "pushing line: " + s1);
-    lines += [s1];
+    lines += [s1]/~;
 }
 
 fn read_block_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt {
     #debug(">>> block comment");
     let p = rdr.chpos;
-    let mut lines: [str] = [];
+    let mut lines: [str]/~ = []/~;
     let mut col: uint = rdr.col;
     bump(rdr);
     bump(rdr);
@@ -153,14 +153,14 @@ fn peeking_at_comment(rdr: string_reader) -> bool {
 }
 
 fn consume_comment(rdr: string_reader, code_to_the_left: bool,
-                   &comments: [cmnt]) {
+                   &comments: [cmnt]/~) {
     #debug(">>> consume comment");
     if rdr.curr == '/' && nextch(rdr) == '/' {
-        comments += [read_line_comments(rdr, code_to_the_left)];
+        comments += [read_line_comments(rdr, code_to_the_left)]/~;
     } else if rdr.curr == '/' && nextch(rdr) == '*' {
-        comments += [read_block_comment(rdr, code_to_the_left)];
+        comments += [read_block_comment(rdr, code_to_the_left)]/~;
     } else if rdr.curr == '#' && nextch(rdr) == '!' {
-        comments += [read_shebang_comment(rdr, code_to_the_left)];
+        comments += [read_shebang_comment(rdr, code_to_the_left)]/~;
     } else { fail; }
     #debug("<<< consume comment");
 }
@@ -170,7 +170,7 @@ type lit = {lit: str, pos: uint};
 fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
                                 path: str,
                                 srdr: io::reader) ->
-   {cmnts: [cmnt], lits: [lit]} {
+   {cmnts: [cmnt]/~, lits: [lit]/~} {
     let src = @str::from_bytes(srdr.read_whole_stream());
     let itr = @interner::mk::<@str>(
         {|x|str::hash(*x)},
@@ -179,8 +179,8 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
     let rdr = lexer::new_low_level_string_reader
         (span_diagnostic, codemap::new_filemap(path, src, 0u, 0u), itr);
 
-    let mut comments: [cmnt] = [];
-    let mut literals: [lit] = [];
+    let mut comments: [cmnt]/~ = []/~;
+    let mut literals: [lit]/~ = []/~;
     let mut first_read: bool = true;
     while !is_eof(rdr) {
         loop {
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index 1d92561a108..8cc6f3d6484 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -149,9 +149,9 @@ impl parser_common for parser {
     }
 
     fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>,
-                                       f: fn(parser) -> T) -> [T] {
+                                       f: fn(parser) -> T) -> [T]/~ {
         let mut first = true;
-        let mut v = [];
+        let mut v = []/~;
         while self.token != token::GT
             && self.token != token::BINOP(token::SHR) {
             alt sep {
@@ -166,7 +166,7 @@ impl parser_common for parser {
     }
 
     fn parse_seq_to_gt<T: copy>(sep: option<token::token>,
-                                f: fn(parser) -> T) -> [T] {
+                                f: fn(parser) -> T) -> [T]/~ {
         let v = self.parse_seq_to_before_gt(sep, f);
         self.expect_gt();
 
@@ -174,7 +174,7 @@ impl parser_common for parser {
     }
 
     fn parse_seq_lt_gt<T: copy>(sep: option<token::token>,
-                                f: fn(parser) -> T) -> spanned<[T]> {
+                                f: fn(parser) -> T) -> spanned<[T]/~> {
         let lo = self.span.lo;
         self.expect(token::LT);
         let result = self.parse_seq_to_before_gt::<T>(sep, f);
@@ -184,7 +184,7 @@ impl parser_common for parser {
     }
 
     fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep,
-                                 f: fn(parser) -> T) -> [T] {
+                                 f: fn(parser) -> T) -> [T]/~ {
         let val = self.parse_seq_to_before_end(ket, sep, f);
         self.bump();
         ret val;
@@ -192,9 +192,9 @@ impl parser_common for parser {
 
 
     fn parse_seq_to_before_end<T: copy>(ket: token::token, sep: seq_sep,
-                                        f: fn(parser) -> T) -> [T] {
+                                        f: fn(parser) -> T) -> [T]/~ {
         let mut first: bool = true;
-        let mut v: [T] = [];
+        let mut v: [T]/~ = []/~;
         while self.token != ket {
             alt sep.sep {
               some(t) { if first { first = false; }
@@ -207,8 +207,10 @@ impl parser_common for parser {
         ret v;
     }
 
-    fn parse_unspanned_seq<T: copy>(bra: token::token, ket: token::token,
-                                    sep: seq_sep, f: fn(parser) -> T) -> [T] {
+    fn parse_unspanned_seq<T: copy>(bra: token::token,
+                                    ket: token::token,
+                                    sep: seq_sep,
+                                    f: fn(parser) -> T) -> [T]/~ {
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
         self.bump();
@@ -218,7 +220,7 @@ impl parser_common for parser {
     // NB: Do not use this function unless you actually plan to place the
     // spanned list in the AST.
     fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep,
-                          f: fn(parser) -> T) -> spanned<[T]> {
+                          f: fn(parser) -> T) -> spanned<[T]/~> {
         let lo = self.span.lo;
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index ae11c883443..f1dd8d69cc1 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -7,24 +7,26 @@ type ctx =
     @{sess: parse::parse_sess,
       cfg: ast::crate_cfg};
 
-fn eval_crate_directives(cx: ctx, cdirs: [@ast::crate_directive], prefix: str,
-                         &view_items: [@ast::view_item],
-                         &items: [@ast::item]) {
+fn eval_crate_directives(cx: ctx,
+                         cdirs: [@ast::crate_directive]/~,
+                         prefix: str,
+                         &view_items: [@ast::view_item]/~,
+                         &items: [@ast::item]/~) {
     for cdirs.each {|sub_cdir|
         eval_crate_directive(cx, sub_cdir, prefix, view_items, items);
     }
 }
 
-fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive],
+fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive]/~,
                                 prefix: str, suffix: option<str>)
-    -> (ast::_mod, [ast::attribute]) {
+    -> (ast::_mod, [ast::attribute]/~) {
     #debug("eval crate prefix: %s", prefix);
     #debug("eval crate suffix: %s",
            option::get_default(suffix, "none"));
     let (cview_items, citems, cattrs)
         = parse_companion_mod(cx, prefix, suffix);
-    let mut view_items: [@ast::view_item] = [];
-    let mut items: [@ast::item] = [];
+    let mut view_items: [@ast::view_item]/~ = []/~;
+    let mut items: [@ast::item]/~ = []/~;
     eval_crate_directives(cx, cdirs, prefix, view_items, items);
     ret ({view_items: view_items + cview_items,
           items: items + citems},
@@ -42,7 +44,7 @@ We build the path to the companion mod by combining the prefix and the
 optional suffix then adding the .rs extension.
 */
 fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
-    -> ([@ast::view_item], [@ast::item], [ast::attribute]) {
+    -> ([@ast::view_item]/~, [@ast::item]/~, [ast::attribute]/~) {
 
     fn companion_file(+prefix: str, suffix: option<str>) -> str {
         ret alt suffix {
@@ -72,11 +74,11 @@ fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
         cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
         ret (m0.view_items, m0.items, inner_attrs.inner);
     } else {
-        ret ([], [], []);
+        ret ([]/~, []/~, []/~);
     }
 }
 
-fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]) -> @str {
+fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]/~) -> @str {
     alt ::attr::first_attr_value_str_by_name(attrs, "path") {
       some(d) {
         ret d;
@@ -86,8 +88,8 @@ fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]) -> @str {
 }
 
 fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
-                        &view_items: [@ast::view_item],
-                        &items: [@ast::item]) {
+                        &view_items: [@ast::view_item]/~,
+                        &items: [@ast::item]/~) {
     alt cdir.node {
       ast::cdir_src_mod(id, attrs) {
         let file_path = cdir_path_opt(@(*id + ".rs"), attrs);
@@ -108,7 +110,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
         // Thread defids, chpos and byte_pos through the parsers
         cx.sess.chpos = r0.chpos;
         cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
-        items += [i];
+        items += [i]/~;
       }
       ast::cdir_dir_mod(id, cdirs, attrs) {
         let path = cdir_path_opt(id, attrs);
@@ -126,9 +128,9 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
               vis: ast::public,
               span: cdir.span};
         cx.sess.next_id += 1;
-        items += [i];
+        items += [i]/~;
       }
-      ast::cdir_view_item(vi) { view_items += [vi]; }
+      ast::cdir_view_item(vi) { view_items += [vi]/~; }
       ast::cdir_syntax(pth) { }
     }
 }
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 5a3dceace8d..8687e011635 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -26,7 +26,7 @@ enum tt_frame_up { /* to break a circularity */
 /* TODO: figure out how to have a uniquely linked stack, and change to `~` */
 #[doc = "an unzipping of `token_tree`s"]
 type tt_frame = @{
-    readme: [ast::token_tree],
+    readme: [ast::token_tree]/~,
     mut idx: uint,
     up: tt_frame_up
 };
@@ -41,7 +41,7 @@ type tt_reader = @{
 };
 
 fn new_tt_reader(span_diagnostic: diagnostic::span_handler,
-                 itr: @interner::interner<@str>, src: [ast::token_tree])
+                 itr: @interner::interner<@str>, src: [ast::token_tree]/~)
     -> tt_reader {
     let r = @{span_diagnostic: span_diagnostic, interner: itr,
               mut cur: @{readme: src, mut idx: 0u,
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 13b68b2ce70..d0847a974b7 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -13,6 +13,7 @@ import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
                 seq_sep_none, token_to_str};
 import common::*;//{parser_common};
 import dvec::{dvec, extensions};
+import vec::{push};
 
 export file_type;
 export parser;
@@ -51,10 +52,10 @@ enum pexpr {
  */
 enum class_contents { ctor_decl(fn_decl, blk, codemap::span),
                       dtor_decl(blk, codemap::span),
-                      members([@class_member]) }
+                      members([@class_member]/~) }
 
 type arg_or_capture_item = either<arg, capture_item>;
-type item_info = (ident, item_, option<[attribute]>);
+type item_info = (ident, item_, option<[attribute]/~>);
 
 class parser {
     let sess: parse_sess;
@@ -176,14 +177,14 @@ class parser {
         // functions can't have constrained types. Not sure whether
         // that would be desirable anyway. See bug for the story on
         // constrained types.
-        let constrs: [@constr] = [];
+        let constrs: [@constr]/~ = []/~;
         let (ret_style, ret_ty) = self.parse_ret_ty();
         ret {inputs: inputs, output: ret_ty,
              purity: purity, cf: ret_style,
              constraints: constrs};
     }
 
-    fn parse_ty_methods() -> [ty_method] {
+    fn parse_ty_methods() -> [ty_method]/~ {
         self.parse_unspanned_seq(token::LBRACE, token::RBRACE,
                                  seq_sep_none()) { |p|
             let attrs = p.parse_outer_attributes();
@@ -215,7 +216,7 @@ class parser {
 
     // if i is the jth ident in args, return j
     // otherwise, fail
-    fn ident_index(args: [arg], i: ident) -> uint {
+    fn ident_index(args: [arg]/~, i: ident) -> uint {
         let mut j = 0u;
         for args.each {|a| if a.ident == i { ret j; } j += 1u; }
         self.fatal("unbound variable `" + *i + "` in constraint arg");
@@ -235,7 +236,7 @@ class parser {
         ret @{node: carg, span: sp};
     }
 
-    fn parse_constr_arg(args: [arg]) -> @constr_arg {
+    fn parse_constr_arg(args: [arg]/~) -> @constr_arg {
         let sp = self.span;
         let mut carg = carg_base;
         if self.token == token::BINOP(token::STAR) {
@@ -247,7 +248,7 @@ class parser {
         ret @{node: carg, span: sp};
     }
 
-    fn parse_ty_constr(fn_args: [arg]) -> @constr {
+    fn parse_ty_constr(fn_args: [arg]/~) -> @constr {
         let lo = self.span.lo;
         let path = self.parse_path_without_tps();
         let args = self.parse_unspanned_seq(
@@ -261,7 +262,7 @@ class parser {
     fn parse_constr_in_type() -> @ty_constr {
         let lo = self.span.lo;
         let path = self.parse_path_without_tps();
-        let args: [@ty_constr_arg] = self.parse_unspanned_seq(
+        let args: [@ty_constr_arg]/~ = self.parse_unspanned_seq(
             token::LPAREN, token::RPAREN,
             seq_sep_trailing_disallowed(token::COMMA),
             {|p| p.parse_type_constr_arg()});
@@ -272,17 +273,17 @@ class parser {
 
 
     fn parse_constrs<T: copy>(pser: fn(parser) -> @constr_general<T>) ->
-        [@constr_general<T>] {
-        let mut constrs: [@constr_general<T>] = [];
+        [@constr_general<T>]/~ {
+        let mut constrs: [@constr_general<T>]/~ = []/~;
         loop {
             let constr = pser(self);
-            constrs += [constr];
+            constrs += [constr]/~;
             if self.token == token::COMMA { self.bump(); }
             else { ret constrs; }
         };
     }
 
-    fn parse_type_constraints() -> [@ty_constr] {
+    fn parse_type_constraints() -> [@ty_constr]/~ {
         ret self.parse_constrs({|p| p.parse_constr_in_type()});
     }
 
@@ -359,10 +360,10 @@ class parser {
                 self.bump();
                 ty_nil
             } else {
-                let mut ts = [self.parse_ty(false)];
+                let mut ts = [self.parse_ty(false)]/~;
                 while self.token == token::COMMA {
                     self.bump();
-                    ts += [self.parse_ty(false)];
+                    ts += [self.parse_ty(false)]/~;
                 }
                 let t = if vec::len(ts) == 1u { ts[0].node }
                 else { ty_tup(ts) };
@@ -583,22 +584,22 @@ class parser {
 
         let lo = self.span.lo;
         let global = self.eat(token::MOD_SEP);
-        let mut ids = [];
+        let mut ids = []/~;
         loop {
             let is_not_last =
                 self.look_ahead(2u) != token::LT
                 && self.look_ahead(1u) == token::MOD_SEP;
 
             if is_not_last {
-                ids += [parse_ident(self)];
+                ids += [parse_ident(self)]/~;
                 self.expect(token::MOD_SEP);
             } else {
-                ids += [parse_last_ident(self)];
+                ids += [parse_last_ident(self)]/~;
                 break;
             }
         }
         @{span: mk_sp(lo, self.last_span.hi), global: global,
-          idents: ids, rp: none, types: []}
+          idents: ids, rp: none, types: []/~}
     }
 
     fn parse_value_path() -> @path {
@@ -639,7 +640,7 @@ class parser {
                 self.parse_seq_lt_gt(some(token::COMMA),
                                      {|p| p.parse_ty(false)})
             } else {
-                {node: [], span: path.span}
+                {node: []/~, span: path.span}
             }
         };
 
@@ -715,9 +716,9 @@ class parser {
                 let lit = @spanned(lo, hi, lit_nil);
                 ret self.mk_pexpr(lo, hi, expr_lit(lit));
             }
-            let mut es = [self.parse_expr()];
+            let mut es = [self.parse_expr()]/~;
             while self.token == token::COMMA {
-                self.bump(); es += [self.parse_expr()];
+                self.bump(); es += [self.parse_expr()]/~;
             }
             hi = self.span.hi;
             self.expect(token::RPAREN);
@@ -733,7 +734,7 @@ class parser {
             if self.is_keyword("mut") ||
                 is_plain_ident(self.token)
                 && self.look_ahead(1u) == token::COLON {
-                let mut fields = [self.parse_field(token::COLON)];
+                let mut fields = [self.parse_field(token::COLON)]/~;
                 let mut base = none;
                 while self.token != token::RBRACE {
                     // optional comma before "with"
@@ -750,7 +751,7 @@ class parser {
                         // record ends by an optional trailing comma
                         break;
                     }
-                    fields += [self.parse_field(token::COLON)];
+                    fields += [self.parse_field(token::COLON)]/~;
                 }
                 hi = self.span.hi;
                 self.expect(token::RBRACE);
@@ -997,7 +998,7 @@ class parser {
                         self.expect(token::LT);
                         self.parse_seq_to_gt(some(token::COMMA),
                                              {|p| p.parse_ty(false)})
-                    } else { [] };
+                    } else { []/~ };
                     e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e),
                                                          self.get_str(i),
                                                          tys));
@@ -1027,13 +1028,13 @@ class parser {
                 let blk = self.parse_fn_block_expr();
                 alt e.node {
                   expr_call(f, args, false) {
-                    e = pexpr(@{node: expr_call(f, args + [blk], true)
+                    e = pexpr(@{node: expr_call(f, args + [blk]/~, true)
                                 with *self.to_expr(e)});
                   }
                   _ {
                     e = self.mk_pexpr(lo, self.last_span.hi,
                                       expr_call(self.to_expr(e),
-                                                [blk], true));
+                                                [blk]/~, true));
                   }
                 }
               }
@@ -1085,10 +1086,10 @@ class parser {
         ret alt self.token {
           token::LPAREN | token::LBRACE | token::LBRACKET {
             let ket = flip(self.token);
-            tt_delim([parse_tt_flat(self, true)] +
+            tt_delim([parse_tt_flat(self, true)]/~ +
                      self.parse_seq_to_before_end(ket, seq_sep_none(),
                                                   {|p| p.parse_token_tree()})
-                     + [parse_tt_flat(self, true)])
+                     + [parse_tt_flat(self, true)]/~)
           }
           _ { parse_tt_flat(self, false) }
         };
@@ -1354,7 +1355,7 @@ class parser {
             let b_arg = vec::last(args);
             let last = self.mk_expr(b_arg.span.lo, b_arg.span.hi,
                                     ctor(b_arg));
-            @{node: expr_call(f, vec::init(args) + [last], true)
+            @{node: expr_call(f, vec::init(args) + [last]/~, true)
               with *call}
           }
           _ {
@@ -1385,14 +1386,14 @@ class parser {
         else { alt_exhaustive };
         let discriminant = self.parse_expr();
         self.expect(token::LBRACE);
-        let mut arms: [arm] = [];
+        let mut arms: [arm]/~ = []/~;
         while self.token != token::RBRACE {
             let pats = self.parse_pats();
             let mut guard = none;
             if self.eat_keyword("if") { guard = some(self.parse_expr()); }
             if self.token == token::FAT_ARROW { self.bump(); }
             let blk = self.parse_block();
-            arms += [{pats: pats, guard: guard, body: blk}];
+            arms += [{pats: pats, guard: guard, body: blk}]/~;
         }
         let mut hi = self.span.hi;
         self.bump();
@@ -1434,10 +1435,10 @@ class parser {
         }
     }
 
-    fn parse_pats() -> [@pat] {
-        let mut pats = [];
+    fn parse_pats() -> [@pat]/~ {
+        let mut pats = []/~;
         loop {
-            pats += [self.parse_pat()];
+            pats += [self.parse_pat()]/~;
             if self.token == token::BINOP(token::OR) { self.bump(); }
             else { ret pats; }
         };
@@ -1463,7 +1464,7 @@ class parser {
           }
           token::LBRACE {
             self.bump();
-            let mut fields = [];
+            let mut fields = []/~;
             let mut etc = false;
             let mut first = true;
             while self.token != token::RBRACE {
@@ -1498,7 +1499,7 @@ class parser {
                                node: pat_ident(fieldpath, none),
                                span: mk_sp(lo, hi)};
                 }
-                fields += [{ident: fieldname, pat: subpat}];
+                fields += [{ident: fieldname, pat: subpat}]/~;
             }
             hi = self.span.hi;
             self.bump();
@@ -1513,10 +1514,10 @@ class parser {
                 let expr = self.mk_expr(lo, hi, expr_lit(lit));
                 pat = pat_lit(expr);
             } else {
-                let mut fields = [self.parse_pat()];
+                let mut fields = [self.parse_pat()]/~;
                 while self.token == token::COMMA {
                     self.bump();
-                    fields += [self.parse_pat()];
+                    fields += [self.parse_pat()]/~;
                 }
                 if vec::len(fields) == 1u { self.expect(token::COMMA); }
                 hi = self.span.hi;
@@ -1548,7 +1549,7 @@ class parser {
             } else {
                 let enum_path = self.parse_path_with_tps(true);
                 hi = enum_path.span.hi;
-                let mut args: [@pat] = [];
+                let mut args: [@pat]/~ = []/~;
                 let mut star_pat = false;
                 alt self.token {
                   token::LPAREN {
@@ -1604,9 +1605,9 @@ class parser {
     fn parse_let() -> @decl {
         let is_mutbl = self.eat_keyword("mut");
         let lo = self.span.lo;
-        let mut locals = [self.parse_local(is_mutbl, true)];
+        let mut locals = [self.parse_local(is_mutbl, true)]/~;
         while self.eat(token::COMMA) {
-            locals += [self.parse_local(is_mutbl, true)];
+            locals += [self.parse_local(is_mutbl, true)]/~;
         }
         ret @spanned(lo, self.last_span.hi, decl_local(locals));
     }
@@ -1628,8 +1629,8 @@ class parser {
               span: mk_sp(lo, self.last_span.hi)};
     }
 
-    fn parse_stmt(+first_item_attrs: [attribute]) -> @stmt {
-        fn check_expected_item(p: parser, current_attrs: [attribute]) {
+    fn parse_stmt(+first_item_attrs: [attribute]/~) -> @stmt {
+        fn check_expected_item(p: parser, current_attrs: [attribute]/~) {
             // If we have attributes then we should have an item
             if vec::is_not_empty(current_attrs) {
                 p.fatal("expected item");
@@ -1645,7 +1646,7 @@ class parser {
         } else {
             let mut item_attrs;
             alt self.parse_outer_attrs_or_ext(first_item_attrs) {
-              none { item_attrs = []; }
+              none { item_attrs = []/~; }
               some(left(attrs)) { item_attrs = attrs; }
               some(right(ext)) {
                 ret @spanned(lo, ext.span.hi, stmt_expr(ext, self.get_id()));
@@ -1685,14 +1686,15 @@ class parser {
         ret blk;
     }
 
-    fn parse_inner_attrs_and_block(parse_attrs: bool) -> ([attribute], blk) {
+    fn parse_inner_attrs_and_block(parse_attrs: bool)
+        -> ([attribute]/~, blk) {
 
         fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) ->
-            {inner: [attribute], next: [attribute]} {
+            {inner: [attribute]/~, next: [attribute]/~} {
             if parse_attrs {
                 p.parse_inner_attrs_and_next()
             } else {
-                {inner: [], next: []}
+                {inner: []/~, next: []/~}
             }
         }
 
@@ -1727,12 +1729,12 @@ class parser {
     // necessary, and this should take a qualifier.
     // some blocks start with "#{"...
     fn parse_block_tail(lo: uint, s: blk_check_mode) -> blk {
-        self.parse_block_tail_(lo, s, [])
+        self.parse_block_tail_(lo, s, []/~)
     }
 
     fn parse_block_tail_(lo: uint, s: blk_check_mode,
-                         +first_item_attrs: [attribute]) -> blk {
-        let mut stmts = [];
+                         +first_item_attrs: [attribute]/~) -> blk {
+        let mut stmts = []/~;
         let mut expr = none;
         let {attrs_remaining, view_items} =
             self.parse_view(first_item_attrs, true);
@@ -1749,13 +1751,14 @@ class parser {
               }
               _ {
                 let stmt = self.parse_stmt(initial_attrs);
-                initial_attrs = [];
+                initial_attrs = []/~;
                 alt stmt.node {
                   stmt_expr(e, stmt_id) { // Expression without semicolon:
                     alt self.token {
                       token::SEMI {
                         self.bump();
-                        stmts += [@{node: stmt_semi(e, stmt_id) with *stmt}];
+                        push(stmts,
+                             @{node: stmt_semi(e, stmt_id) with *stmt});
                       }
                       token::RBRACE {
                         expr = some(e);
@@ -1766,13 +1769,13 @@ class parser {
                                         but found '"
                                        + token_to_str(self.reader, t) + "'");
                         }
-                        stmts += [stmt];
+                        stmts += [stmt]/~;
                       }
                     }
                   }
 
                   _ { // All other kinds of statements:
-                    stmts += [stmt];
+                    stmts += [stmt]/~;
 
                     if classify::stmt_ends_with_semi(*stmt) {
                         self.expect(token::SEMI);
@@ -1790,30 +1793,32 @@ class parser {
     }
 
     fn parse_ty_param() -> ty_param {
-        let mut bounds = [];
+        let mut bounds = []/~;
         let ident = self.parse_ident();
         if self.eat(token::COLON) {
             while self.token != token::COMMA && self.token != token::GT {
-                if self.eat_keyword("send") { bounds += [bound_send]; }
-                else if self.eat_keyword("copy") { bounds += [bound_copy]; }
-                else if self.eat_keyword("const") { bounds += [bound_const]; }
-                else { bounds += [bound_iface(self.parse_ty(false))]; }
+                if self.eat_keyword("send") { push(bounds, bound_send); }
+                else if self.eat_keyword("copy") { push(bounds, bound_copy) }
+                else if self.eat_keyword("const") {
+                    push(bounds, bound_const)
+                }
+                else { push(bounds, bound_iface(self.parse_ty(false))); }
             }
         }
         ret {ident: ident, id: self.get_id(), bounds: @bounds};
     }
 
-    fn parse_ty_params() -> [ty_param] {
+    fn parse_ty_params() -> [ty_param]/~ {
         if self.eat(token::LT) {
             self.parse_seq_to_gt(some(token::COMMA), {|p| p.parse_ty_param()})
-        } else { [] }
+        } else { []/~ }
     }
 
     fn parse_fn_decl(purity: purity,
                      parse_arg_fn: fn(parser) -> arg_or_capture_item)
         -> (fn_decl, capture_clause) {
 
-        let args_or_capture_items: [arg_or_capture_item] =
+        let args_or_capture_items: [arg_or_capture_item]/~ =
             self.parse_unspanned_seq(
                 token::LPAREN, token::RPAREN,
                 seq_sep_trailing_disallowed(token::COMMA), parse_arg_fn);
@@ -1824,7 +1829,7 @@ class parser {
         // Use the args list to translate each bound variable
         // mentioned in a constraint to an arg index.
         // Seems weird to do this in the parser, but I'm not sure how else to.
-        let mut constrs = [];
+        let mut constrs = []/~;
         if self.token == token::COLON {
             self.bump();
             constrs = self.parse_constrs({|p| p.parse_ty_constr(inputs) });
@@ -1840,7 +1845,7 @@ class parser {
     fn parse_fn_block_decl() -> (fn_decl, capture_clause) {
         let inputs_captures = {
             if self.eat(token::OROR) {
-                []
+                []/~
             } else {
                 self.parse_unspanned_seq(
                     token::BINOP(token::OR), token::BINOP(token::OR),
@@ -1857,11 +1862,11 @@ class parser {
               output: output,
               purity: impure_fn,
               cf: return_val,
-              constraints: []},
+              constraints: []/~},
              @either::rights(inputs_captures));
     }
 
-    fn parse_fn_header() -> {ident: ident, tps: [ty_param]} {
+    fn parse_fn_header() -> {ident: ident, tps: [ty_param]/~} {
         let id = self.parse_value_ident();
         let ty_params = self.parse_ty_params();
         ret {ident: id, tps: ty_params};
@@ -1869,7 +1874,7 @@ class parser {
 
     fn mk_item(lo: uint, hi: uint, +ident: ident,
                +node: item_, vis: visibility,
-               +attrs: [attribute]) -> @item {
+               +attrs: [attribute]/~) -> @item {
         ret @{ident: ident,
               attrs: attrs,
               id: self.get_id(),
@@ -1922,9 +1927,9 @@ class parser {
     }
 
     // Parses three variants (with the region/type params always optional):
-    //    impl /&<T: copy> of to_str for [T] { ... }
-    //    impl name/&<T> of to_str for [T] { ... }
-    //    impl name/&<T> for [T] { ... }
+    //    impl /&<T: copy> of to_str for [T]/~ { ... }
+    //    impl name/&<T> of to_str for [T]/~ { ... }
+    //    impl name/&<T> for [T]/~ { ... }
     fn parse_item_impl() -> item_info {
         fn wrap_path(p: parser, pt: @path) -> @ty {
             @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span}
@@ -1936,7 +1941,7 @@ class parser {
                 (none, self.parse_region_param(), self.parse_ty_params())
             }
             else if self.is_keyword("of") {
-                (none, rp_none, [])
+                (none, rp_none, []/~)
             } else {
                 let id = self.parse_ident();
                 let rp = self.parse_region_param();
@@ -1956,10 +1961,10 @@ class parser {
         };
         self.expect_keyword("for");
         let ty = self.parse_ty(false);
-        let mut meths = [];
+        let mut meths = []/~;
         self.expect(token::LBRACE);
         while !self.eat(token::RBRACE) {
-            meths += [self.parse_method(public)];
+            meths += [self.parse_method(public)]/~;
         }
         (ident, item_impl(tps, rp, ifce, ty, meths), none)
     }
@@ -1969,7 +1974,7 @@ class parser {
     // the return type of the ctor function.
     fn ident_to_path_tys(i: ident,
                          rp: region_param,
-                         typarams: [ty_param]) -> @path {
+                         typarams: [ty_param]/~) -> @path {
         let s = self.last_span;
 
         // Hack.  But then, this whole function is in service of a hack.
@@ -1978,7 +1983,7 @@ class parser {
           rp_self { some(self.region_from_name(some(@"self"))) }
         };
 
-        @{span: s, global: false, idents: [i],
+        @{span: s, global: false, idents: [i]/~,
           rp: a_r,
           types: vec::map(typarams, {|tp|
               @{id: self.get_id(),
@@ -1992,7 +1997,7 @@ class parser {
           id: self.get_id()}
     }
 
-    fn parse_iface_ref_list() -> [@iface_ref] {
+    fn parse_iface_ref_list() -> [@iface_ref]/~ {
         self.parse_seq_to_before_end(
             token::LBRACE, seq_sep_trailing_disallowed(token::COMMA),
             {|p| p.parse_iface_ref()})
@@ -2003,11 +2008,11 @@ class parser {
         let rp = self.parse_region_param();
         let ty_params = self.parse_ty_params();
         let class_path = self.ident_to_path_tys(class_name, rp, ty_params);
-        let ifaces : [@iface_ref] = if self.eat(token::COLON)
+        let ifaces : [@iface_ref]/~ = if self.eat(token::COLON)
             { self.parse_iface_ref_list() }
-        else { [] };
+        else { []/~ };
         self.expect(token::LBRACE);
-        let mut ms: [@class_member] = [];
+        let mut ms: [@class_member]/~ = []/~;
         let ctor_id = self.get_id();
         let mut the_ctor : option<(fn_decl, blk, codemap::span)> = none;
         let mut the_dtor : option<(blk, codemap::span)> = none;
@@ -2092,16 +2097,16 @@ class parser {
         }
         else if self.eat_keyword("priv") {
             self.expect(token::LBRACE);
-            let mut results = [];
+            let mut results = []/~;
             while self.token != token::RBRACE {
-                results += [self.parse_single_class_item(private)];
+                results += [self.parse_single_class_item(private)]/~;
             }
             self.bump();
             ret members(results);
         }
         else {
             // Probably need to parse attrs
-            ret members([self.parse_single_class_item(public)]);
+            ret members([self.parse_single_class_item(public)]/~);
         }
     }
 
@@ -2112,11 +2117,11 @@ class parser {
     }
 
     fn parse_mod_items(term: token::token,
-                       +first_item_attrs: [attribute]) -> _mod {
+                       +first_item_attrs: [attribute]/~) -> _mod {
         // Shouldn't be any view items since we've already parsed an item attr
         let {attrs_remaining, view_items} =
             self.parse_view(first_item_attrs, false);
-        let mut items: [@item] = [];
+        let mut items: [@item]/~ = []/~;
         let mut first = true;
         while self.token != term {
             let mut attrs = self.parse_outer_attributes();
@@ -2124,7 +2129,7 @@ class parser {
             #debug["parse_mod_items: parse_item(attrs=%?)", attrs];
             let vis = self.parse_visibility(private);
             alt self.parse_item(attrs, vis) {
-              some(i) { items += [i]; }
+              some(i) { items += [i]/~; }
               _ {
                 self.fatal("expected item but found '" +
                            token_to_str(self.reader, self.token) + "'");
@@ -2160,7 +2165,7 @@ class parser {
         (id, item_mod(m), some(inner_attrs.inner))
     }
 
-    fn parse_item_native_fn(+attrs: [attribute],
+    fn parse_item_native_fn(+attrs: [attribute]/~,
                             purity: purity) -> @native_item {
         let lo = self.last_span.lo;
         let t = self.parse_fn_header();
@@ -2186,22 +2191,22 @@ class parser {
         else { self.unexpected(); }
     }
 
-    fn parse_native_item(+attrs: [attribute]) ->
+    fn parse_native_item(+attrs: [attribute]/~) ->
         @native_item {
         self.parse_item_native_fn(attrs, self.parse_fn_purity())
     }
 
-    fn parse_native_mod_items(+first_item_attrs: [attribute]) ->
+    fn parse_native_mod_items(+first_item_attrs: [attribute]/~) ->
         native_mod {
         // Shouldn't be any view items since we've already parsed an item attr
         let {attrs_remaining, view_items} =
             self.parse_view(first_item_attrs, false);
-        let mut items: [@native_item] = [];
+        let mut items: [@native_item]/~ = []/~;
         let mut initial_attrs = attrs_remaining;
         while self.token != token::RBRACE {
             let attrs = initial_attrs + self.parse_outer_attributes();
-            initial_attrs = [];
-            items += [self.parse_native_item(attrs)];
+            initial_attrs = []/~;
+            items += [self.parse_native_item(attrs)]/~;
         }
         ret {view_items: view_items,
              items: items};
@@ -2246,7 +2251,7 @@ class parser {
         let id = self.parse_ident();
         let rp = self.parse_region_param();
         let ty_params = self.parse_ty_params();
-        let mut variants: [variant] = [];
+        let mut variants: [variant]/~ = []/~;
         // Newtype syntax
         if self.token == token::EQ {
             self.check_restricted_keywords_(*id);
@@ -2256,12 +2261,12 @@ class parser {
             let variant =
                 spanned(ty.span.lo, ty.span.hi,
                         {name: id,
-                         attrs: [],
-                         args: [{ty: ty, id: self.get_id()}],
+                         attrs: []/~,
+                         args: [{ty: ty, id: self.get_id()}]/~,
                          id: self.get_id(),
                          disr_expr: none,
                          vis: public});
-            ret (id, item_enum([variant], ty_params, rp), none);
+            ret (id, item_enum([variant]/~, ty_params, rp), none);
         }
         self.expect(token::LBRACE);
 
@@ -2272,7 +2277,7 @@ class parser {
             let vlo = self.span.lo;
             let vis = self.parse_visibility(default_vis);
             let ident = self.parse_value_ident();
-            let mut args = [], disr_expr = none;
+            let mut args = []/~, disr_expr = none;
             if self.token == token::LPAREN {
                 all_nullary = false;
                 let arg_tys = self.parse_unspanned_seq(
@@ -2280,7 +2285,7 @@ class parser {
                     seq_sep_trailing_disallowed(token::COMMA),
                     {|p| p.parse_ty(false)});
                 for arg_tys.each {|ty|
-                    args += [{ty: ty, id: self.get_id()}];
+                    args += [{ty: ty, id: self.get_id()}]/~;
                 }
             } else if self.eat(token::EQ) {
                 have_disr = true;
@@ -2290,7 +2295,7 @@ class parser {
             let vr = {name: ident, attrs: variant_attrs,
                       args: args, id: self.get_id(),
                       disr_expr: disr_expr, vis: vis};
-            variants += [spanned(vlo, self.last_span.hi, vr)];
+            variants += [spanned(vlo, self.last_span.hi, vr)]/~;
 
             if !self.eat(token::COMMA) { break; }
         }
@@ -2333,7 +2338,7 @@ class parser {
         }
     }
 
-    fn parse_item(+attrs: [attribute], vis: visibility)
+    fn parse_item(+attrs: [attribute]/~, vis: visibility)
         -> option<@item> {
         let lo = self.span.lo;
         let (ident, item_, extra_attrs) = if self.eat_keyword("const") {
@@ -2384,20 +2389,20 @@ class parser {
     fn parse_view_path() -> @view_path {
         let lo = self.span.lo;
         let first_ident = self.parse_ident();
-        let mut path = [first_ident];
+        let mut path = [first_ident]/~;
         #debug("parsed view_path: %s", *first_ident);
         alt self.token {
           token::EQ {
             // x = foo::bar
             self.bump();
-            path = [self.parse_ident()];
+            path = [self.parse_ident()]/~;
             while self.token == token::MOD_SEP {
                 self.bump();
                 let id = self.parse_ident();
-                path += [id];
+                path += [id]/~;
             }
             let path = @{span: mk_sp(lo, self.span.hi), global: false,
-                         idents: path, rp: none, types: []};
+                         idents: path, rp: none, types: []/~};
             ret @spanned(lo, self.span.hi,
                          view_path_simple(first_ident, path, self.get_id()));
           }
@@ -2411,7 +2416,7 @@ class parser {
 
                   token::IDENT(i, _) {
                     self.bump();
-                    path += [self.get_str(i)];
+                    path += [self.get_str(i)]/~;
                   }
 
                   // foo::bar::{a,b,c}
@@ -2422,7 +2427,7 @@ class parser {
                         {|p| p.parse_path_list_ident()});
                     let path = @{span: mk_sp(lo, self.span.hi),
                                  global: false, idents: path,
-                                 rp: none, types: []};
+                                 rp: none, types: []/~};
                     ret @spanned(lo, self.span.hi,
                                  view_path_list(path, idents, self.get_id()));
                   }
@@ -2432,7 +2437,7 @@ class parser {
                     self.bump();
                     let path = @{span: mk_sp(lo, self.span.hi),
                                  global: false, idents: path,
-                                 rp: none, types: []};
+                                 rp: none, types: []/~};
                     ret @spanned(lo, self.span.hi,
                                  view_path_glob(path, self.get_id()));
                   }
@@ -2445,16 +2450,16 @@ class parser {
         }
         let last = path[vec::len(path) - 1u];
         let path = @{span: mk_sp(lo, self.span.hi), global: false,
-                     idents: path, rp: none, types: []};
+                     idents: path, rp: none, types: []/~};
         ret @spanned(lo, self.span.hi,
                      view_path_simple(last, path, self.get_id()));
     }
 
-    fn parse_view_paths() -> [@view_path] {
-        let mut vp = [self.parse_view_path()];
+    fn parse_view_paths() -> [@view_path]/~ {
+        let mut vp = [self.parse_view_path()]/~;
         while self.token == token::COMMA {
             self.bump();
-            vp += [self.parse_view_path()];
+            vp += [self.parse_view_path()]/~;
         }
         ret vp;
     }
@@ -2468,7 +2473,7 @@ class parser {
             || self.token_is_keyword("export", tok)
     }
 
-    fn parse_view_item(+attrs: [attribute]) -> @view_item {
+    fn parse_view_item(+attrs: [attribute]/~) -> @view_item {
         let lo = self.span.lo, vis = self.parse_visibility(private);
         let node = if self.eat_keyword("use") {
             self.parse_use()
@@ -2482,14 +2487,14 @@ class parser {
           vis: vis, span: mk_sp(lo, self.last_span.hi)}
     }
 
-    fn parse_view(+first_item_attrs: [attribute],
-                  only_imports: bool) -> {attrs_remaining: [attribute],
-                                          view_items: [@view_item]} {
+    fn parse_view(+first_item_attrs: [attribute]/~,
+                  only_imports: bool) -> {attrs_remaining: [attribute]/~,
+                                          view_items: [@view_item]/~} {
         let mut attrs = first_item_attrs + self.parse_outer_attributes();
-        let mut items = [];
+        let mut items = []/~;
         while if only_imports { self.is_keyword("import") }
         else { self.is_view_item() } {
-            items += [self.parse_view_item(attrs)];
+            items += [self.parse_view_item(attrs)]/~;
             attrs = self.parse_outer_attributes();
         }
         {attrs_remaining: attrs, view_items: items}
@@ -2502,7 +2507,7 @@ class parser {
         let first_item_outer_attrs = crate_attrs.next;
         let m = self.parse_mod_items(token::EOF, first_item_outer_attrs);
         ret @spanned(lo, self.span.lo,
-                     {directives: [],
+                     {directives: []/~,
                       module: m,
                       attrs: crate_attrs.inner,
                       config: self.cfg});
@@ -2523,7 +2528,7 @@ class parser {
     //
     // Each directive imperatively extends its environment with 0 or more
     // items.
-    fn parse_crate_directive(first_outer_attr: [attribute]) ->
+    fn parse_crate_directive(first_outer_attr: [attribute]/~) ->
         crate_directive {
 
         // Collect the next attributes
@@ -2564,8 +2569,8 @@ class parser {
     }
 
     fn parse_crate_directives(term: token::token,
-                              first_outer_attr: [attribute]) ->
-        [@crate_directive] {
+                              first_outer_attr: [attribute]/~) ->
+        [@crate_directive]/~ {
 
         // This is pretty ugly. If we have an outer attribute then we can't
         // accept seeing the terminator next, so if we do see it then fail the
@@ -2574,12 +2579,12 @@ class parser {
             self.expect_keyword("mod");
         }
 
-        let mut cdirs: [@crate_directive] = [];
+        let mut cdirs: [@crate_directive]/~ = []/~;
         let mut first_outer_attr = first_outer_attr;
         while self.token != term {
             let cdir = @self.parse_crate_directive(first_outer_attr);
-            cdirs += [cdir];
-            first_outer_attr = [];
+            cdirs += [cdir]/~;
+            first_outer_attr = []/~;
         }
         ret cdirs;
     }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 9d6427912df..feffbd4020c 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -260,7 +260,7 @@ fn contextual_keyword_table() -> hashmap<str, ()> {
         "with",
         /* temp */
         "sep", "many", "at_least_one", "parse"
-    ];
+    ]/~;
     for keys.each {|word|
         words.insert(word, ());
     }
@@ -298,7 +298,7 @@ fn restricted_keyword_table() -> hashmap<str, ()> {
         "true", "trait", "type",
         "unchecked", "unsafe",
         "while"
-    ];
+    ]/~;
     for keys.each {|word|
         words.insert(word, ());
     }
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 329dff0c4c1..5f10fe0eb47 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -71,7 +71,7 @@ fn tok_str(++t: token) -> str {
     }
 }
 
-fn buf_str(toks: [mut token], szs: [mut int], left: uint, right: uint,
+fn buf_str(toks: [mut token]/~, szs: [mut int]/~, left: uint, right: uint,
            lim: uint) -> str {
     let n = vec::len(toks);
     assert (n == vec::len(szs));
@@ -100,9 +100,9 @@ fn mk_printer(out: io::writer, linewidth: uint) -> printer {
     // fall behind.
     let n: uint = 3u * linewidth;
     #debug("mk_printer %u", linewidth);
-    let token: [mut token] = vec::to_mut(vec::from_elem(n, EOF));
-    let size: [mut int] = vec::to_mut(vec::from_elem(n, 0));
-    let scan_stack: [mut uint] = vec::to_mut(vec::from_elem(n, 0u));
+    let token: [mut token]/~ = vec::to_mut(vec::from_elem(n, EOF));
+    let size: [mut int]/~ = vec::to_mut(vec::from_elem(n, 0));
+    let scan_stack: [mut uint]/~ = vec::to_mut(vec::from_elem(n, 0u));
     @{out: out,
       buf_len: n,
       mut margin: linewidth as int,
@@ -206,8 +206,8 @@ type printer = @{
     mut space: int, // number of spaces left on line
     mut left: uint, // index of left side of input stream
     mut right: uint, // index of right side of input stream
-    token: [mut token], // ring-buffr stream goes through
-    size: [mut int], // ring-buffer of calculated sizes
+    token: [mut token]/~, // ring-buffr stream goes through
+    size: [mut int]/~, // ring-buffer of calculated sizes
     mut left_total: int, // running size of stream "...left"
     mut right_total: int, // running size of stream "...right"
     // pseudo-stack, really a ring too. Holds the
@@ -216,7 +216,7 @@ type printer = @{
     // BEGIN (if there is any) on top of it. Stuff is flushed off the
     // bottom as it becomes irrelevant due to the primary ring-buffer
     // advancing.
-    mut scan_stack: [mut uint],
+    mut scan_stack: [mut uint]/~,
     mut scan_stack_empty: bool, // top==bottom disambiguator
     mut top: uint, // index of top of scan_stack
     mut bottom: uint, // index of bottom of scan_stack
@@ -231,7 +231,7 @@ impl printer for printer {
     // be very careful with this!
     fn replace_last_token(t: token) { self.token[self.right] = t; }
     fn pretty_print(t: token) {
-        #debug("pp [%u,%u]", self.left, self.right);
+        #debug("pp [%u,%u]/~", self.left, self.right);
         alt t {
           EOF {
             if !self.scan_stack_empty {
@@ -248,17 +248,17 @@ impl printer for printer {
                 self.left = 0u;
                 self.right = 0u;
             } else { self.advance_right(); }
-            #debug("pp BEGIN/buffer [%u,%u]", self.left, self.right);
+            #debug("pp BEGIN/buffer [%u,%u]/~", self.left, self.right);
             self.token[self.right] = t;
             self.size[self.right] = -self.right_total;
             self.scan_push(self.right);
           }
           END {
             if self.scan_stack_empty {
-                #debug("pp END/print [%u,%u]", self.left, self.right);
+                #debug("pp END/print [%u,%u]/~", self.left, self.right);
                 self.print(t, 0);
             } else {
-                #debug("pp END/buffer [%u,%u]", self.left, self.right);
+                #debug("pp END/buffer [%u,%u]/~", self.left, self.right);
                 self.advance_right();
                 self.token[self.right] = t;
                 self.size[self.right] = -1;
@@ -272,7 +272,7 @@ impl printer for printer {
                 self.left = 0u;
                 self.right = 0u;
             } else { self.advance_right(); }
-            #debug("pp BREAK/buffer [%u,%u]", self.left, self.right);
+            #debug("pp BREAK/buffer [%u,%u]/~", self.left, self.right);
             self.check_stack(0);
             self.scan_push(self.right);
             self.token[self.right] = t;
@@ -281,10 +281,10 @@ impl printer for printer {
           }
           STRING(s, len) {
             if self.scan_stack_empty {
-                #debug("pp STRING/print [%u,%u]", self.left, self.right);
+                #debug("pp STRING/print [%u,%u]/~", self.left, self.right);
                 self.print(t, len);
             } else {
-                #debug("pp STRING/buffer [%u,%u]", self.left, self.right);
+                #debug("pp STRING/buffer [%u,%u]/~", self.left, self.right);
                 self.advance_right();
                 self.token[self.right] = t;
                 self.size[self.right] = len;
@@ -295,7 +295,7 @@ impl printer for printer {
         }
     }
     fn check_stream() {
-        #debug("check_stream [%u, %u] with left_total=%d, right_total=%d",
+        #debug("check_stream [%u, %u]/~ with left_total=%d, right_total=%d",
                self.left, self.right, self.left_total, self.right_total);
         if self.right_total - self.left_total > self.space {
             #debug("scan window is %d, longer than space on line (%d)",
@@ -347,7 +347,7 @@ impl printer for printer {
         assert (self.right != self.left);
     }
     fn advance_left(++x: token, L: int) {
-        #debug("advnce_left [%u,%u], sizeof(%u)=%d", self.left, self.right,
+        #debug("advnce_left [%u,%u]/~, sizeof(%u)=%d", self.left, self.right,
                self.left, L);
         if L >= 0 {
             self.print(x, L);
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index b38f4c35d86..0d3855b4f93 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -26,8 +26,8 @@ fn no_ann() -> pp_ann {
 type ps =
     @{s: pp::printer,
       cm: option<codemap>,
-      comments: option<[comments::cmnt]>,
-      literals: option<[comments::lit]>,
+      comments: option<[comments::cmnt]/~>,
+      literals: option<[comments::lit]/~>,
       mut cur_cmnt: uint,
       mut cur_lit: uint,
       boxes: dvec<pp::breaks>,
@@ -46,8 +46,8 @@ fn end(s: ps) {
 fn rust_printer(writer: io::writer) -> ps {
     ret @{s: pp::mk_printer(writer, default_columns),
           cm: none::<codemap>,
-          comments: none::<[comments::cmnt]>,
-          literals: none::<[comments::lit]>,
+          comments: none::<[comments::cmnt]/~>,
+          literals: none::<[comments::lit]/~>,
           mut cur_cmnt: 0u,
           mut cur_lit: 0u,
           boxes: dvec(),
@@ -97,7 +97,7 @@ fn item_to_str(i: @ast::item) -> str { ret to_str(i, print_item); }
 
 fn attr_to_str(i: ast::attribute) -> str { ret to_str(i, print_attribute); }
 
-fn typarams_to_str(tps: [ast::ty_param]) -> str {
+fn typarams_to_str(tps: [ast::ty_param]/~) -> str {
     ret to_str(tps, print_type_params)
 }
 
@@ -106,7 +106,7 @@ fn path_to_str(&&p: @ast::path) -> str {
 }
 
 fn fun_to_str(decl: ast::fn_decl, name: ast::ident,
-              params: [ast::ty_param]) -> str {
+              params: [ast::ty_param]/~) -> str {
     let buffer = io::mem_buffer();
     let s = rust_printer(io::mem_buffer_writer(buffer));
     print_fn(s, decl, name, params);
@@ -119,15 +119,15 @@ fn fun_to_str(decl: ast::fn_decl, name: ast::ident,
 #[test]
 fn test_fun_to_str() {
     let decl: ast::fn_decl = {
-        inputs: [],
+        inputs: []/~,
         output: @{id: 0,
                   node: ast::ty_nil,
                   span: ast_util::dummy_sp()},
         purity: ast::impure_fn,
         cf: ast::return_val,
-        constraints: []
+        constraints: []/~
     };
-    assert fun_to_str(decl, "a", []) == "fn a()";
+    assert fun_to_str(decl, "a", []/~) == "fn a()";
 }
 
 fn block_to_str(blk: ast::blk) -> str {
@@ -158,8 +158,8 @@ fn variant_to_str(var: ast::variant) -> str {
 fn test_variant_to_str() {
     let var = ast_util::respan(ast_util::dummy_sp(), {
         name: "principle_skinner",
-        attrs: [],
-        args: [],
+        attrs: []/~,
+        args: []/~,
         id: 0,
         disr_expr: none
     });
@@ -254,7 +254,7 @@ fn synth_comment(s: ps, text: str) {
     word(s.s, "*/");
 }
 
-fn commasep<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) {
+fn commasep<IN>(s: ps, b: breaks, elts: [IN]/~, op: fn(ps, IN)) {
     box(s, 0u, b);
     let mut first = true;
     for elts.each {|elt|
@@ -265,7 +265,7 @@ fn commasep<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) {
 }
 
 
-fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN),
+fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN]/~, op: fn(ps, IN),
                      get_span: fn(IN) -> codemap::span) {
     box(s, 0u, b);
     let len = vec::len::<IN>(elts);
@@ -284,12 +284,12 @@ fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN),
     end(s);
 }
 
-fn commasep_exprs(s: ps, b: breaks, exprs: [@ast::expr]) {
+fn commasep_exprs(s: ps, b: breaks, exprs: [@ast::expr]/~) {
     fn expr_span(&&expr: @ast::expr) -> codemap::span { ret expr.span; }
     commasep_cmnt(s, b, exprs, print_expr, expr_span);
 }
 
-fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]) {
+fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]/~) {
     print_inner_attributes(s, attrs);
     for _mod.view_items.each {|vitem|
         print_view_item(s, vitem);
@@ -297,7 +297,7 @@ fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]) {
     for _mod.items.each {|item| print_item(s, item); }
 }
 
-fn print_native_mod(s: ps, nmod: ast::native_mod, attrs: [ast::attribute]) {
+fn print_native_mod(s: ps, nmod: ast::native_mod, attrs: [ast::attribute]/~) {
     print_inner_attributes(s, attrs);
     for nmod.view_items.each {|vitem|
         print_view_item(s, vitem);
@@ -504,7 +504,7 @@ fn print_item(s: ps, &&item: @ast::item) {
           hardbreak_if_not_bol(s);
           maybe_print_comment(s, ctor.span.lo);
           head(s, "new");
-          print_fn_args_and_ret(s, ctor.node.dec, []);
+          print_fn_args_and_ret(s, ctor.node.dec, []/~);
           space(s.s);
           print_block(s, ctor.node.body);
           option::iter(m_dtor) {|dtor|
@@ -626,7 +626,7 @@ fn print_method(s: ps, meth: @ast::method) {
     print_block_with_attrs(s, meth.body, meth.attrs);
 }
 
-fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
+fn print_outer_attributes(s: ps, attrs: [ast::attribute]/~) {
     let mut count = 0;
     for attrs.each {|attr|
         alt attr.node.style {
@@ -637,7 +637,7 @@ fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
     if count > 0 { hardbreak_if_not_bol(s); }
 }
 
-fn print_inner_attributes(s: ps, attrs: [ast::attribute]) {
+fn print_inner_attributes(s: ps, attrs: [ast::attribute]/~) {
     let mut count = 0;
     for attrs.each {|attr|
         alt attr.node.style {
@@ -685,7 +685,7 @@ fn print_block(s: ps, blk: ast::blk) {
     print_possibly_embedded_block(s, blk, block_normal, indent_unit);
 }
 
-fn print_block_with_attrs(s: ps, blk: ast::blk, attrs: [ast::attribute]) {
+fn print_block_with_attrs(s: ps, blk: ast::blk, attrs: [ast::attribute]/~) {
     print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs);
 }
 
@@ -694,11 +694,11 @@ enum embed_type { block_macro, block_block_fn, block_normal, }
 fn print_possibly_embedded_block(s: ps, blk: ast::blk, embedded: embed_type,
                                  indented: uint) {
     print_possibly_embedded_block_(
-        s, blk, embedded, indented, []);
+        s, blk, embedded, indented, []/~);
 }
 
 fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
-                                  indented: uint, attrs: [ast::attribute]) {
+                                  indented: uint, attrs: [ast::attribute]/~) {
     alt blk.node.rules {
       ast::unchecked_blk { word(s.s, "unchecked"); }
       ast::unsafe_blk { word(s.s, "unsafe"); }
@@ -811,10 +811,10 @@ fn print_mac(s: ps, m: ast::mac) {
 
 fn print_vstore(s: ps, t: ast::vstore) {
     alt t {
-      ast::vstore_fixed(some(i)) { word_space(s, #fmt("/%u", i)); }
-      ast::vstore_fixed(none) { word_space(s, "/_"); }
-      ast::vstore_uniq { word_space(s, "/~"); }
-      ast::vstore_box { word_space(s, "/@"); }
+      ast::vstore_fixed(some(i)) { word(s.s, #fmt("/%u", i)); }
+      ast::vstore_fixed(none) { word(s.s, "/_"); }
+      ast::vstore_uniq { word(s.s, "/~"); }
+      ast::vstore_box { word(s.s, "/@"); }
       ast::vstore_slice(r) { word(s.s, "/"); print_region(s, r); }
     }
 }
@@ -1259,18 +1259,18 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
 }
 
 fn print_fn(s: ps, decl: ast::fn_decl, name: ast::ident,
-            typarams: [ast::ty_param]) {
+            typarams: [ast::ty_param]/~) {
     alt decl.purity {
       ast::impure_fn { head(s, "fn") }
       _ { head(s, purity_to_str(decl.purity) + " fn") }
     }
     word(s.s, *name);
     print_type_params(s, typarams);
-    print_fn_args_and_ret(s, decl, []);
+    print_fn_args_and_ret(s, decl, []/~);
 }
 
 fn print_fn_args(s: ps, decl: ast::fn_decl,
-                 cap_items: [ast::capture_item]) {
+                 cap_items: [ast::capture_item]/~) {
     commasep(s, inconsistent, decl.inputs, print_arg);
     if cap_items.is_not_empty() {
         let mut first = decl.inputs.is_empty();
@@ -1284,7 +1284,7 @@ fn print_fn_args(s: ps, decl: ast::fn_decl,
 }
 
 fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl,
-                         cap_items: [ast::capture_item]) {
+                         cap_items: [ast::capture_item]/~) {
     popen(s);
     print_fn_args(s, decl, cap_items);
     pclose(s);
@@ -1301,7 +1301,7 @@ fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl,
 }
 
 fn print_fn_block_args(s: ps, decl: ast::fn_decl,
-                       cap_items: [ast::capture_item]) {
+                       cap_items: [ast::capture_item]/~) {
     word(s.s, "|");
     print_fn_args(s, decl, cap_items);
     word(s.s, "|");
@@ -1329,7 +1329,7 @@ fn print_arg_mode(s: ps, m: ast::mode) {
     if ms != "" { word(s.s, ms); }
 }
 
-fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]) {
+fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]/~) {
     if vec::len(*bounds) > 0u {
         word(s.s, ":");
         for vec::each(*bounds) {|bound|
@@ -1351,7 +1351,7 @@ fn print_region_param(s: ps, rp: ast::region_param) {
     }
 }
 
-fn print_type_params(s: ps, &&params: [ast::ty_param]) {
+fn print_type_params(s: ps, &&params: [ast::ty_param]/~) {
     if vec::len(params) > 0u {
         word(s.s, "<");
         fn printParam(s: ps, param: ast::ty_param) {
@@ -1408,7 +1408,7 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) {
     }
 }
 
-fn print_view_paths(s: ps, vps: [@ast::view_path]) {
+fn print_view_paths(s: ps, vps: [@ast::view_path]/~) {
     commasep(s, inconsistent, vps, print_view_path);
 }
 
@@ -1480,7 +1480,7 @@ fn print_arg(s: ps, input: ast::arg) {
 
 fn print_ty_fn(s: ps, opt_proto: option<ast::proto>,
                decl: ast::fn_decl, id: option<ast::ident>,
-               tps: option<[ast::ty_param]>) {
+               tps: option<[ast::ty_param]/~>) {
     ibox(s, indent_unit);
     word(s.s, opt_proto_to_str(opt_proto));
     alt id { some(id) { word(s.s, " "); word(s.s, *id); } _ { } }
@@ -1682,7 +1682,8 @@ fn next_comment(s: ps) -> option<comments::cmnt> {
     }
 }
 
-fn constr_args_to_str<T>(f: fn@(T) -> str, args: [@ast::sp_constr_arg<T>]) ->
+fn constr_args_to_str<T>(f: fn@(T) -> str,
+                         args: [@ast::sp_constr_arg<T>]/~) ->
    str {
     let mut comma = false;
     let mut s = "(";
@@ -1727,7 +1728,7 @@ fn ty_constr_to_str(&&c: @ast::ty_constr) -> str {
                                              c.node.args);
 }
 
-fn constrs_str<T>(constrs: [T], elt: fn(T) -> str) -> str {
+fn constrs_str<T>(constrs: [T]/~, elt: fn(T) -> str) -> str {
     let mut s = "", colon = true;
     for constrs.each {|c|
         if colon { s += " : "; colon = false; } else { s += ", "; }
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 714097d356d..6a07d7f62ca 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -13,13 +13,13 @@ import codemap::span;
 enum vt<E> { mk_vt(visitor<E>), }
 
 enum fn_kind {
-    fk_item_fn(ident, [ty_param]), //< an item declared with fn()
-    fk_method(ident, [ty_param], @method),
+    fk_item_fn(ident, [ty_param]/~), //< an item declared with fn()
+    fk_method(ident, [ty_param]/~, @method),
     fk_anon(proto, capture_clause),  //< an anonymous function like fn@(...)
     fk_fn_block(capture_clause),     //< a block {||...}
-    fk_ctor(ident, [ty_param], node_id /* self id */,
+    fk_ctor(ident, [ty_param]/~, node_id /* self id */,
             def_id /* parent class id */), // class constructor
-    fk_dtor([ty_param], node_id /* self id */,
+    fk_dtor([ty_param]/~, node_id /* self id */,
             def_id /* parent class id */) // class destructor
 
 }
@@ -33,13 +33,13 @@ fn name_of_fn(fk: fn_kind) -> ident {
     }
 }
 
-fn tps_of_fn(fk: fn_kind) -> [ty_param] {
+fn tps_of_fn(fk: fn_kind) -> [ty_param]/~ {
     alt fk {
       fk_item_fn(_, tps) | fk_method(_, tps, _)
               | fk_ctor(_, tps, _, _) | fk_dtor(tps, _, _) {
           /* FIXME (#2543) */ copy tps
       }
-      fk_anon(*) | fk_fn_block(*) { [] }
+      fk_anon(*) | fk_fn_block(*) { []/~ }
     }
 }
 
@@ -58,7 +58,7 @@ type visitor<E> =
       visit_decl: fn@(@decl, E, vt<E>),
       visit_expr: fn@(@expr, E, vt<E>),
       visit_ty: fn@(@ty, E, vt<E>),
-      visit_ty_params: fn@([ty_param], E, vt<E>),
+      visit_ty_params: fn@([ty_param]/~, E, vt<E>),
       visit_constr: fn@(@path, span, node_id, E, vt<E>),
       visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id, E, vt<E>),
       visit_class_item: fn@(@class_member, E, vt<E>)};
@@ -256,7 +256,7 @@ fn visit_native_item<E>(ni: @native_item, e: E, v: vt<E>) {
     }
 }
 
-fn visit_ty_params<E>(tps: [ty_param], e: E, v: vt<E>) {
+fn visit_ty_params<E>(tps: [ty_param]/~, e: E, v: vt<E>) {
     for tps.each {|tp|
         for vec::each(*tp.bounds) {|bound|
             alt bound {
@@ -286,7 +286,7 @@ fn visit_method_helper<E>(m: @method, e: E, v: vt<E>) {
 }
 
 // Similar logic to the comment on visit_method_helper - Tim
-fn visit_class_ctor_helper<E>(ctor: class_ctor, nm: ident, tps: [ty_param],
+fn visit_class_ctor_helper<E>(ctor: class_ctor, nm: ident, tps: [ty_param]/~,
                               parent_id: def_id, e: E, v: vt<E>) {
     v.visit_fn(fk_ctor(/* FIXME (#2543) */ copy nm,
                        /* FIXME (#2543) */ copy tps,
@@ -295,7 +295,7 @@ fn visit_class_ctor_helper<E>(ctor: class_ctor, nm: ident, tps: [ty_param],
 
 }
 
-fn visit_class_dtor_helper<E>(dtor: class_dtor, tps: [ty_param],
+fn visit_class_dtor_helper<E>(dtor: class_dtor, tps: [ty_param]/~,
                               parent_id: def_id, e: E, v: vt<E>) {
     v.visit_fn(fk_dtor(/* FIXME (#2543) */ copy tps, dtor.node.self_id,
                        parent_id), ast_util::dtor_dec(),
@@ -337,7 +337,7 @@ fn visit_expr_opt<E>(eo: option<@expr>, e: E, v: vt<E>) {
     alt eo { none { } some(ex) { v.visit_expr(ex, e, v); } }
 }
 
-fn visit_exprs<E>(exprs: [@expr], e: E, v: vt<E>) {
+fn visit_exprs<E>(exprs: [@expr]/~, e: E, v: vt<E>) {
     for exprs.each {|ex| v.visit_expr(ex, e, v); }
 }
 
@@ -454,7 +454,7 @@ type simple_visitor =
       visit_decl: fn@(@decl),
       visit_expr: fn@(@expr),
       visit_ty: fn@(@ty),
-      visit_ty_params: fn@([ty_param]),
+      visit_ty_params: fn@([ty_param]/~),
       visit_constr: fn@(@path, span, node_id),
       visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id),
       visit_class_item: fn@(@class_member)};
@@ -474,7 +474,7 @@ fn default_simple_visitor() -> simple_visitor {
           visit_decl: fn@(_d: @decl) { },
           visit_expr: fn@(_e: @expr) { },
           visit_ty: simple_ignore_ty,
-          visit_ty_params: fn@(_ps: [ty_param]) {},
+          visit_ty_params: fn@(_ps: [ty_param]/~) {},
           visit_constr: fn@(_p: @path, _sp: span, _id: node_id) { },
           visit_fn: fn@(_fk: fn_kind, _d: fn_decl, _b: blk, _sp: span,
                         _id: node_id) { },
@@ -533,7 +533,9 @@ fn mk_simple_visitor(v: simple_visitor) -> vt<()> {
         f(ty);
         visit_ty(ty, e, v);
     }
-    fn v_ty_params(f: fn@([ty_param]), ps: [ty_param], &&e: (), v: vt<()>) {
+    fn v_ty_params(f: fn@([ty_param]/~),
+                   ps: [ty_param]/~,
+                   &&e: (), v: vt<()>) {
         f(ps);
         visit_ty_params(ps, e, v);
     }