author    Michael Sullivan <sully@msully.net>  2012-06-25 20:00:46 -0700
committer Michael Sullivan <sully@msully.net>  2012-06-25 20:00:46 -0700
commit    329eca6044fdf376a7a89ec7a96dba7a8b884cf7 (patch)
tree      7008814278a066914b6ba36818388d5212ffda9f /src/libsyntax/parse
parent    c087aaf56b1109163126fea4c2760f8414ffbe56 (diff)
Make vectors uglier ([]/~). Sorry. Should be temporary. Closes #2725.
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/attr.rs       29
-rw-r--r--  src/libsyntax/parse/comments.rs   36
-rw-r--r--  src/libsyntax/parse/common.rs     22
-rw-r--r--  src/libsyntax/parse/eval.rs       32
-rw-r--r--  src/libsyntax/parse/lexer.rs       4
-rw-r--r--  src/libsyntax/parse/parser.rs    251
-rw-r--r--  src/libsyntax/parse/token.rs       4
7 files changed, 194 insertions, 184 deletions
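
The change is purely mechanical: apparently as part of moving vectors toward explicit allocation sigils (issue #2725), every unique-vector type [T] gains a /~ suffix ([T]/~) and every vector literal [...] becomes [...]/~ throughout the parser. A minimal before/after sketch in the tree's own 2012-era dialect (illustrative only; this predates Rust 1.0 and will not compile with a modern compiler), following the pattern of parse_outer_attributes in attr.rs from the diff below:

    // before this commit
    fn parse_outer_attributes() -> [ast::attribute] {
        let mut attrs: [ast::attribute] = [];
        while self.token == token::POUND
            && self.look_ahead(1u) == token::LBRACKET {
            attrs += [self.parse_attribute(ast::attr_outer)];
        }
        ret attrs;
    }

    // after this commit: the /~ suffix marks a unique (heap) vector,
    // on the type, the empty literal, and each appended singleton
    fn parse_outer_attributes() -> [ast::attribute]/~ {
        let mut attrs: [ast::attribute]/~ = []/~;
        while self.token == token::POUND
            && self.look_ahead(1u) == token::LBRACKET {
            attrs += [self.parse_attribute(ast::attr_outer)]/~;
        }
        ret attrs;
    }
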
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index dad180847ee..4d78bcdc0a9 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -7,11 +7,11 @@ export parser_attr;
 
 // A type to distingush between the parsing of item attributes or syntax
 // extensions, which both begin with token.POUND
-type attr_or_ext = option<either<[ast::attribute], @ast::expr>>;
+type attr_or_ext = option<either<[ast::attribute]/~, @ast::expr>>;
 
 impl parser_attr for parser {
 
-    fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute])
+    fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute]/~)
         -> attr_or_ext
     {
         let expect_item_next = vec::is_not_empty(first_item_attrs);
@@ -21,7 +21,8 @@ impl parser_attr for parser {
                 self.bump();
                 let first_attr =
                     self.parse_attribute_naked(ast::attr_outer, lo);
-                ret some(left([first_attr] + self.parse_outer_attributes()));
+                ret some(left([first_attr]/~ +
+                              self.parse_outer_attributes()));
             } else if !(self.look_ahead(1u) == token::LT
                         || self.look_ahead(1u) == token::LBRACKET
                         || self.look_ahead(1u) == token::POUND
@@ -33,11 +34,11 @@ impl parser_attr for parser {
     }
 
     // Parse attributes that appear before an item
-    fn parse_outer_attributes() -> [ast::attribute] {
-        let mut attrs: [ast::attribute] = [];
+    fn parse_outer_attributes() -> [ast::attribute]/~ {
+        let mut attrs: [ast::attribute]/~ = []/~;
         while self.token == token::POUND
             && self.look_ahead(1u) == token::LBRACKET {
-            attrs += [self.parse_attribute(ast::attr_outer)];
+            attrs += [self.parse_attribute(ast::attr_outer)]/~;
         }
         ret attrs;
     }
@@ -64,9 +65,9 @@ impl parser_attr for parser {
     // is an inner attribute of the containing item or an outer attribute of
     // the first contained item until we see the semi).
     fn parse_inner_attrs_and_next() ->
-        {inner: [ast::attribute], next: [ast::attribute]} {
-        let mut inner_attrs: [ast::attribute] = [];
-        let mut next_outer_attrs: [ast::attribute] = [];
+        {inner: [ast::attribute]/~, next: [ast::attribute]/~} {
+        let mut inner_attrs: [ast::attribute]/~ = []/~;
+        let mut next_outer_attrs: [ast::attribute]/~ = []/~;
         while self.token == token::POUND {
             if self.look_ahead(1u) != token::LBRACKET {
                 // This is an extension
@@ -75,13 +76,13 @@ impl parser_attr for parser {
             let attr = self.parse_attribute(ast::attr_inner);
             if self.token == token::SEMI {
                 self.bump();
-                inner_attrs += [attr];
+                inner_attrs += [attr]/~;
             } else {
                 // It's not really an inner attribute
                 let outer_attr =
                     spanned(attr.span.lo, attr.span.hi,
                             {style: ast::attr_outer, value: attr.node.value});
-                next_outer_attrs += [outer_attr];
+                next_outer_attrs += [outer_attr]/~;
                 break;
             }
         }
@@ -110,15 +111,15 @@ impl parser_attr for parser {
         }
     }
 
-    fn parse_meta_seq() -> [@ast::meta_item] {
+    fn parse_meta_seq() -> [@ast::meta_item]/~ {
         ret self.parse_seq(token::LPAREN, token::RPAREN,
                            seq_sep_trailing_disallowed(token::COMMA),
                            {|p| p.parse_meta_item()}).node;
     }
 
-    fn parse_optional_meta() -> [@ast::meta_item] {
+    fn parse_optional_meta() -> [@ast::meta_item]/~ {
         alt self.token { token::LPAREN { ret self.parse_meta_seq(); }
-                         _ { ret []; } }
+                         _ { ret []/~; } }
     }
 }
 
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index 54d14f2eaf4..2f10a30bd55 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -16,7 +16,7 @@ enum cmnt_style {
     blank_line, // Just a manual blank line "\n\n", for layout
 }
 
-type cmnt = {style: cmnt_style, lines: [str], pos: uint};
+type cmnt = {style: cmnt_style, lines: [str]/~, pos: uint};
 
 fn read_to_eol(rdr: string_reader) -> str {
     let mut val = "";
@@ -41,14 +41,14 @@ fn consume_non_eol_whitespace(rdr: string_reader) {
     }
 }
 
-fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]) {
+fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]/~) {
     #debug(">>> blank-line comment");
-    let v: [str] = [];
-    comments += [{style: blank_line, lines: v, pos: rdr.chpos}];
+    let v: [str]/~ = []/~;
+    comments += [{style: blank_line, lines: v, pos: rdr.chpos}]/~;
 }
 
 fn consume_whitespace_counting_blank_lines(rdr: string_reader,
-                                           &comments: [cmnt]) {
+                                           &comments: [cmnt]/~) {
     while is_whitespace(rdr.curr) && !is_eof(rdr) {
         if rdr.col == 0u && rdr.curr == '\n' {
             push_blank_line_comment(rdr, comments);
@@ -62,18 +62,18 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt {
     let p = rdr.chpos;
     #debug("<<< shebang comment");
     ret {style: if code_to_the_left { trailing } else { isolated },
-         lines: [read_one_line_comment(rdr)],
+         lines: [read_one_line_comment(rdr)]/~,
          pos: p};
 }
 
 fn read_line_comments(rdr: string_reader, code_to_the_left: bool) -> cmnt {
     #debug(">>> line comments");
     let p = rdr.chpos;
-    let mut lines: [str] = [];
+    let mut lines: [str]/~ = []/~;
     while rdr.curr == '/' && nextch(rdr) == '/' {
         let line = read_one_line_comment(rdr);
         log(debug, line);
-        lines += [line];
+        lines += [line]/~;
         consume_non_eol_whitespace(rdr);
     }
     #debug("<<< line comments");
@@ -88,7 +88,7 @@ fn all_whitespace(s: str, begin: uint, end: uint) -> bool {
     ret true;
 }
 
-fn trim_whitespace_prefix_and_push_line(&lines: [str],
+fn trim_whitespace_prefix_and_push_line(&lines: [str]/~,
                                         s: str, col: uint) unsafe {
     let mut s1;
     let len = str::len(s);
@@ -98,13 +98,13 @@ fn trim_whitespace_prefix_and_push_line(&lines: [str],
         } else { s1 = ""; }
     } else { s1 = s; }
     log(debug, "pushing line: " + s1);
-    lines += [s1];
+    lines += [s1]/~;
 }
 
 fn read_block_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt {
     #debug(">>> block comment");
     let p = rdr.chpos;
-    let mut lines: [str] = [];
+    let mut lines: [str]/~ = []/~;
     let mut col: uint = rdr.col;
     bump(rdr);
     bump(rdr);
@@ -153,14 +153,14 @@ fn peeking_at_comment(rdr: string_reader) -> bool {
 }
 
 fn consume_comment(rdr: string_reader, code_to_the_left: bool,
-                   &comments: [cmnt]) {
+                   &comments: [cmnt]/~) {
     #debug(">>> consume comment");
     if rdr.curr == '/' && nextch(rdr) == '/' {
-        comments += [read_line_comments(rdr, code_to_the_left)];
+        comments += [read_line_comments(rdr, code_to_the_left)]/~;
     } else if rdr.curr == '/' && nextch(rdr) == '*' {
-        comments += [read_block_comment(rdr, code_to_the_left)];
+        comments += [read_block_comment(rdr, code_to_the_left)]/~;
     } else if rdr.curr == '#' && nextch(rdr) == '!' {
-        comments += [read_shebang_comment(rdr, code_to_the_left)];
+        comments += [read_shebang_comment(rdr, code_to_the_left)]/~;
     } else { fail; }
     #debug("<<< consume comment");
 }
@@ -170,7 +170,7 @@ type lit = {lit: str, pos: uint};
 fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
                                 path: str,
                                 srdr: io::reader) ->
-   {cmnts: [cmnt], lits: [lit]} {
+   {cmnts: [cmnt]/~, lits: [lit]/~} {
     let src = @str::from_bytes(srdr.read_whole_stream());
     let itr = @interner::mk::<@str>(
         {|x|str::hash(*x)},
@@ -179,8 +179,8 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
     let rdr = lexer::new_low_level_string_reader
         (span_diagnostic, codemap::new_filemap(path, src, 0u, 0u), itr);
 
-    let mut comments: [cmnt] = [];
-    let mut literals: [lit] = [];
+    let mut comments: [cmnt]/~ = []/~;
+    let mut literals: [lit]/~ = []/~;
     let mut first_read: bool = true;
     while !is_eof(rdr) {
         loop {
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index 1d92561a108..8cc6f3d6484 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -149,9 +149,9 @@ impl parser_common for parser {
     }
 
     fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>,
-                                       f: fn(parser) -> T) -> [T] {
+                                       f: fn(parser) -> T) -> [T]/~ {
         let mut first = true;
-        let mut v = [];
+        let mut v = []/~;
         while self.token != token::GT
             && self.token != token::BINOP(token::SHR) {
             alt sep {
@@ -166,7 +166,7 @@ impl parser_common for parser {
     }
 
     fn parse_seq_to_gt<T: copy>(sep: option<token::token>,
-                                f: fn(parser) -> T) -> [T] {
+                                f: fn(parser) -> T) -> [T]/~ {
         let v = self.parse_seq_to_before_gt(sep, f);
         self.expect_gt();
 
@@ -174,7 +174,7 @@ impl parser_common for parser {
     }
 
     fn parse_seq_lt_gt<T: copy>(sep: option<token::token>,
-                                f: fn(parser) -> T) -> spanned<[T]> {
+                                f: fn(parser) -> T) -> spanned<[T]/~> {
         let lo = self.span.lo;
         self.expect(token::LT);
         let result = self.parse_seq_to_before_gt::<T>(sep, f);
@@ -184,7 +184,7 @@ impl parser_common for parser {
     }
 
     fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep,
-                                 f: fn(parser) -> T) -> [T] {
+                                 f: fn(parser) -> T) -> [T]/~ {
         let val = self.parse_seq_to_before_end(ket, sep, f);
         self.bump();
         ret val;
@@ -192,9 +192,9 @@ impl parser_common for parser {
 
 
     fn parse_seq_to_before_end<T: copy>(ket: token::token, sep: seq_sep,
-                                        f: fn(parser) -> T) -> [T] {
+                                        f: fn(parser) -> T) -> [T]/~ {
         let mut first: bool = true;
-        let mut v: [T] = [];
+        let mut v: [T]/~ = []/~;
         while self.token != ket {
             alt sep.sep {
               some(t) { if first { first = false; }
@@ -207,8 +207,10 @@ impl parser_common for parser {
         ret v;
     }
 
-    fn parse_unspanned_seq<T: copy>(bra: token::token, ket: token::token,
-                                    sep: seq_sep, f: fn(parser) -> T) -> [T] {
+    fn parse_unspanned_seq<T: copy>(bra: token::token,
+                                    ket: token::token,
+                                    sep: seq_sep,
+                                    f: fn(parser) -> T) -> [T]/~ {
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
         self.bump();
@@ -218,7 +220,7 @@ impl parser_common for parser {
     // NB: Do not use this function unless you actually plan to place the
     // spanned list in the AST.
     fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep,
-                          f: fn(parser) -> T) -> spanned<[T]> {
+                          f: fn(parser) -> T) -> spanned<[T]/~> {
         let lo = self.span.lo;
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index ae11c883443..f1dd8d69cc1 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -7,24 +7,26 @@ type ctx =
     @{sess: parse::parse_sess,
       cfg: ast::crate_cfg};
 
-fn eval_crate_directives(cx: ctx, cdirs: [@ast::crate_directive], prefix: str,
-                         &view_items: [@ast::view_item],
-                         &items: [@ast::item]) {
+fn eval_crate_directives(cx: ctx,
+                         cdirs: [@ast::crate_directive]/~,
+                         prefix: str,
+                         &view_items: [@ast::view_item]/~,
+                         &items: [@ast::item]/~) {
     for cdirs.each {|sub_cdir|
         eval_crate_directive(cx, sub_cdir, prefix, view_items, items);
     }
 }
 
-fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive],
+fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive]/~,
                                 prefix: str, suffix: option<str>)
-    -> (ast::_mod, [ast::attribute]) {
+    -> (ast::_mod, [ast::attribute]/~) {
     #debug("eval crate prefix: %s", prefix);
     #debug("eval crate suffix: %s",
            option::get_default(suffix, "none"));
     let (cview_items, citems, cattrs)
         = parse_companion_mod(cx, prefix, suffix);
-    let mut view_items: [@ast::view_item] = [];
-    let mut items: [@ast::item] = [];
+    let mut view_items: [@ast::view_item]/~ = []/~;
+    let mut items: [@ast::item]/~ = []/~;
     eval_crate_directives(cx, cdirs, prefix, view_items, items);
     ret ({view_items: view_items + cview_items,
           items: items + citems},
@@ -42,7 +44,7 @@ We build the path to the companion mod by combining the prefix and the
 optional suffix then adding the .rs extension.
 */
 fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
-    -> ([@ast::view_item], [@ast::item], [ast::attribute]) {
+    -> ([@ast::view_item]/~, [@ast::item]/~, [ast::attribute]/~) {
 
     fn companion_file(+prefix: str, suffix: option<str>) -> str {
         ret alt suffix {
@@ -72,11 +74,11 @@ fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
         cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
         ret (m0.view_items, m0.items, inner_attrs.inner);
     } else {
-        ret ([], [], []);
+        ret ([]/~, []/~, []/~);
     }
 }
 
-fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]) -> @str {
+fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]/~) -> @str {
     alt ::attr::first_attr_value_str_by_name(attrs, "path") {
       some(d) {
         ret d;
@@ -86,8 +88,8 @@ fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]) -> @str {
 }
 
 fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
-                        &view_items: [@ast::view_item],
-                        &items: [@ast::item]) {
+                        &view_items: [@ast::view_item]/~,
+                        &items: [@ast::item]/~) {
     alt cdir.node {
       ast::cdir_src_mod(id, attrs) {
         let file_path = cdir_path_opt(@(*id + ".rs"), attrs);
@@ -108,7 +110,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
         // Thread defids, chpos and byte_pos through the parsers
         cx.sess.chpos = r0.chpos;
         cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
-        items += [i];
+        items += [i]/~;
       }
       ast::cdir_dir_mod(id, cdirs, attrs) {
         let path = cdir_path_opt(id, attrs);
@@ -126,9 +128,9 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
               vis: ast::public,
               span: cdir.span};
         cx.sess.next_id += 1;
-        items += [i];
+        items += [i]/~;
       }
-      ast::cdir_view_item(vi) { view_items += [vi]; }
+      ast::cdir_view_item(vi) { view_items += [vi]/~; }
       ast::cdir_syntax(pth) { }
     }
 }
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 5a3dceace8d..8687e011635 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -26,7 +26,7 @@ enum tt_frame_up { /* to break a circularity */
 /* TODO: figure out how to have a uniquely linked stack, and change to `~` */
 #[doc = "an unzipping of `token_tree`s"]
 type tt_frame = @{
-    readme: [ast::token_tree],
+    readme: [ast::token_tree]/~,
     mut idx: uint,
     up: tt_frame_up
 };
@@ -41,7 +41,7 @@ type tt_reader = @{
 };
 
 fn new_tt_reader(span_diagnostic: diagnostic::span_handler,
-                 itr: @interner::interner<@str>, src: [ast::token_tree])
+                 itr: @interner::interner<@str>, src: [ast::token_tree]/~)
     -> tt_reader {
     let r = @{span_diagnostic: span_diagnostic, interner: itr,
               mut cur: @{readme: src, mut idx: 0u,
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 13b68b2ce70..d0847a974b7 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -13,6 +13,7 @@ import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
                 seq_sep_none, token_to_str};
 import common::*;//{parser_common};
 import dvec::{dvec, extensions};
+import vec::{push};
 
 export file_type;
 export parser;
@@ -51,10 +52,10 @@ enum pexpr {
  */
 enum class_contents { ctor_decl(fn_decl, blk, codemap::span),
                       dtor_decl(blk, codemap::span),
-                      members([@class_member]) }
+                      members([@class_member]/~) }
 
 type arg_or_capture_item = either<arg, capture_item>;
-type item_info = (ident, item_, option<[attribute]>);
+type item_info = (ident, item_, option<[attribute]/~>);
 
 class parser {
     let sess: parse_sess;
@@ -176,14 +177,14 @@ class parser {
         // functions can't have constrained types. Not sure whether
         // that would be desirable anyway. See bug for the story on
         // constrained types.
-        let constrs: [@constr] = [];
+        let constrs: [@constr]/~ = []/~;
         let (ret_style, ret_ty) = self.parse_ret_ty();
         ret {inputs: inputs, output: ret_ty,
              purity: purity, cf: ret_style,
              constraints: constrs};
     }
 
-    fn parse_ty_methods() -> [ty_method] {
+    fn parse_ty_methods() -> [ty_method]/~ {
         self.parse_unspanned_seq(token::LBRACE, token::RBRACE,
                                  seq_sep_none()) { |p|
             let attrs = p.parse_outer_attributes();
@@ -215,7 +216,7 @@ class parser {
 
     // if i is the jth ident in args, return j
     // otherwise, fail
-    fn ident_index(args: [arg], i: ident) -> uint {
+    fn ident_index(args: [arg]/~, i: ident) -> uint {
         let mut j = 0u;
         for args.each {|a| if a.ident == i { ret j; } j += 1u; }
         self.fatal("unbound variable `" + *i + "` in constraint arg");
@@ -235,7 +236,7 @@ class parser {
         ret @{node: carg, span: sp};
     }
 
-    fn parse_constr_arg(args: [arg]) -> @constr_arg {
+    fn parse_constr_arg(args: [arg]/~) -> @constr_arg {
         let sp = self.span;
         let mut carg = carg_base;
         if self.token == token::BINOP(token::STAR) {
@@ -247,7 +248,7 @@ class parser {
         ret @{node: carg, span: sp};
     }
 
-    fn parse_ty_constr(fn_args: [arg]) -> @constr {
+    fn parse_ty_constr(fn_args: [arg]/~) -> @constr {
         let lo = self.span.lo;
         let path = self.parse_path_without_tps();
         let args = self.parse_unspanned_seq(
@@ -261,7 +262,7 @@ class parser {
     fn parse_constr_in_type() -> @ty_constr {
         let lo = self.span.lo;
         let path = self.parse_path_without_tps();
-        let args: [@ty_constr_arg] = self.parse_unspanned_seq(
+        let args: [@ty_constr_arg]/~ = self.parse_unspanned_seq(
             token::LPAREN, token::RPAREN,
             seq_sep_trailing_disallowed(token::COMMA),
             {|p| p.parse_type_constr_arg()});
@@ -272,17 +273,17 @@ class parser {
 
 
     fn parse_constrs<T: copy>(pser: fn(parser) -> @constr_general<T>) ->
-        [@constr_general<T>] {
-        let mut constrs: [@constr_general<T>] = [];
+        [@constr_general<T>]/~ {
+        let mut constrs: [@constr_general<T>]/~ = []/~;
         loop {
             let constr = pser(self);
-            constrs += [constr];
+            constrs += [constr]/~;
             if self.token == token::COMMA { self.bump(); }
             else { ret constrs; }
         };
     }
 
-    fn parse_type_constraints() -> [@ty_constr] {
+    fn parse_type_constraints() -> [@ty_constr]/~ {
         ret self.parse_constrs({|p| p.parse_constr_in_type()});
     }
 
@@ -359,10 +360,10 @@ class parser {
                 self.bump();
                 ty_nil
             } else {
-                let mut ts = [self.parse_ty(false)];
+                let mut ts = [self.parse_ty(false)]/~;
                 while self.token == token::COMMA {
                     self.bump();
-                    ts += [self.parse_ty(false)];
+                    ts += [self.parse_ty(false)]/~;
                 }
                 let t = if vec::len(ts) == 1u { ts[0].node }
                 else { ty_tup(ts) };
@@ -583,22 +584,22 @@ class parser {
 
         let lo = self.span.lo;
         let global = self.eat(token::MOD_SEP);
-        let mut ids = [];
+        let mut ids = []/~;
         loop {
             let is_not_last =
                 self.look_ahead(2u) != token::LT
                 && self.look_ahead(1u) == token::MOD_SEP;
 
             if is_not_last {
-                ids += [parse_ident(self)];
+                ids += [parse_ident(self)]/~;
                 self.expect(token::MOD_SEP);
             } else {
-                ids += [parse_last_ident(self)];
+                ids += [parse_last_ident(self)]/~;
                 break;
             }
         }
         @{span: mk_sp(lo, self.last_span.hi), global: global,
-          idents: ids, rp: none, types: []}
+          idents: ids, rp: none, types: []/~}
     }
 
     fn parse_value_path() -> @path {
@@ -639,7 +640,7 @@ class parser {
                 self.parse_seq_lt_gt(some(token::COMMA),
                                      {|p| p.parse_ty(false)})
             } else {
-                {node: [], span: path.span}
+                {node: []/~, span: path.span}
             }
         };
 
@@ -715,9 +716,9 @@ class parser {
                 let lit = @spanned(lo, hi, lit_nil);
                 ret self.mk_pexpr(lo, hi, expr_lit(lit));
             }
-            let mut es = [self.parse_expr()];
+            let mut es = [self.parse_expr()]/~;
             while self.token == token::COMMA {
-                self.bump(); es += [self.parse_expr()];
+                self.bump(); es += [self.parse_expr()]/~;
             }
             hi = self.span.hi;
             self.expect(token::RPAREN);
@@ -733,7 +734,7 @@ class parser {
             if self.is_keyword("mut") ||
                 is_plain_ident(self.token)
                 && self.look_ahead(1u) == token::COLON {
-                let mut fields = [self.parse_field(token::COLON)];
+                let mut fields = [self.parse_field(token::COLON)]/~;
                 let mut base = none;
                 while self.token != token::RBRACE {
                     // optional comma before "with"
@@ -750,7 +751,7 @@ class parser {
                         // record ends by an optional trailing comma
                         break;
                     }
-                    fields += [self.parse_field(token::COLON)];
+                    fields += [self.parse_field(token::COLON)]/~;
                 }
                 hi = self.span.hi;
                 self.expect(token::RBRACE);
@@ -997,7 +998,7 @@ class parser {
                         self.expect(token::LT);
                         self.parse_seq_to_gt(some(token::COMMA),
                                              {|p| p.parse_ty(false)})
-                    } else { [] };
+                    } else { []/~ };
                     e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e),
                                                          self.get_str(i),
                                                          tys));
@@ -1027,13 +1028,13 @@ class parser {
                 let blk = self.parse_fn_block_expr();
                 alt e.node {
                   expr_call(f, args, false) {
-                    e = pexpr(@{node: expr_call(f, args + [blk], true)
+                    e = pexpr(@{node: expr_call(f, args + [blk]/~, true)
                                 with *self.to_expr(e)});
                   }
                   _ {
                     e = self.mk_pexpr(lo, self.last_span.hi,
                                       expr_call(self.to_expr(e),
-                                                [blk], true));
+                                                [blk]/~, true));
                   }
                 }
               }
@@ -1085,10 +1086,10 @@ class parser {
         ret alt self.token {
           token::LPAREN | token::LBRACE | token::LBRACKET {
             let ket = flip(self.token);
-            tt_delim([parse_tt_flat(self, true)] +
+            tt_delim([parse_tt_flat(self, true)]/~ +
                      self.parse_seq_to_before_end(ket, seq_sep_none(),
                                                   {|p| p.parse_token_tree()})
-                     + [parse_tt_flat(self, true)])
+                     + [parse_tt_flat(self, true)]/~)
           }
           _ { parse_tt_flat(self, false) }
         };
@@ -1354,7 +1355,7 @@ class parser {
             let b_arg = vec::last(args);
             let last = self.mk_expr(b_arg.span.lo, b_arg.span.hi,
                                     ctor(b_arg));
-            @{node: expr_call(f, vec::init(args) + [last], true)
+            @{node: expr_call(f, vec::init(args) + [last]/~, true)
               with *call}
           }
           _ {
@@ -1385,14 +1386,14 @@ class parser {
         else { alt_exhaustive };
         let discriminant = self.parse_expr();
         self.expect(token::LBRACE);
-        let mut arms: [arm] = [];
+        let mut arms: [arm]/~ = []/~;
         while self.token != token::RBRACE {
             let pats = self.parse_pats();
             let mut guard = none;
             if self.eat_keyword("if") { guard = some(self.parse_expr()); }
             if self.token == token::FAT_ARROW { self.bump(); }
             let blk = self.parse_block();
-            arms += [{pats: pats, guard: guard, body: blk}];
+            arms += [{pats: pats, guard: guard, body: blk}]/~;
         }
         let mut hi = self.span.hi;
         self.bump();
@@ -1434,10 +1435,10 @@ class parser {
         }
     }
 
-    fn parse_pats() -> [@pat] {
-        let mut pats = [];
+    fn parse_pats() -> [@pat]/~ {
+        let mut pats = []/~;
         loop {
-            pats += [self.parse_pat()];
+            pats += [self.parse_pat()]/~;
             if self.token == token::BINOP(token::OR) { self.bump(); }
             else { ret pats; }
         };
@@ -1463,7 +1464,7 @@ class parser {
           }
           token::LBRACE {
             self.bump();
-            let mut fields = [];
+            let mut fields = []/~;
             let mut etc = false;
             let mut first = true;
             while self.token != token::RBRACE {
@@ -1498,7 +1499,7 @@ class parser {
                                node: pat_ident(fieldpath, none),
                                span: mk_sp(lo, hi)};
                 }
-                fields += [{ident: fieldname, pat: subpat}];
+                fields += [{ident: fieldname, pat: subpat}]/~;
             }
             hi = self.span.hi;
             self.bump();
@@ -1513,10 +1514,10 @@ class parser {
                 let expr = self.mk_expr(lo, hi, expr_lit(lit));
                 pat = pat_lit(expr);
             } else {
-                let mut fields = [self.parse_pat()];
+                let mut fields = [self.parse_pat()]/~;
                 while self.token == token::COMMA {
                     self.bump();
-                    fields += [self.parse_pat()];
+                    fields += [self.parse_pat()]/~;
                 }
                 if vec::len(fields) == 1u { self.expect(token::COMMA); }
                 hi = self.span.hi;
@@ -1548,7 +1549,7 @@ class parser {
             } else {
                 let enum_path = self.parse_path_with_tps(true);
                 hi = enum_path.span.hi;
-                let mut args: [@pat] = [];
+                let mut args: [@pat]/~ = []/~;
                 let mut star_pat = false;
                 alt self.token {
                   token::LPAREN {
@@ -1604,9 +1605,9 @@ class parser {
     fn parse_let() -> @decl {
         let is_mutbl = self.eat_keyword("mut");
         let lo = self.span.lo;
-        let mut locals = [self.parse_local(is_mutbl, true)];
+        let mut locals = [self.parse_local(is_mutbl, true)]/~;
         while self.eat(token::COMMA) {
-            locals += [self.parse_local(is_mutbl, true)];
+            locals += [self.parse_local(is_mutbl, true)]/~;
         }
         ret @spanned(lo, self.last_span.hi, decl_local(locals));
     }
@@ -1628,8 +1629,8 @@ class parser {
               span: mk_sp(lo, self.last_span.hi)};
     }
 
-    fn parse_stmt(+first_item_attrs: [attribute]) -> @stmt {
-        fn check_expected_item(p: parser, current_attrs: [attribute]) {
+    fn parse_stmt(+first_item_attrs: [attribute]/~) -> @stmt {
+        fn check_expected_item(p: parser, current_attrs: [attribute]/~) {
             // If we have attributes then we should have an item
             if vec::is_not_empty(current_attrs) {
                 p.fatal("expected item");
@@ -1645,7 +1646,7 @@ class parser {
         } else {
             let mut item_attrs;
             alt self.parse_outer_attrs_or_ext(first_item_attrs) {
-              none { item_attrs = []; }
+              none { item_attrs = []/~; }
               some(left(attrs)) { item_attrs = attrs; }
               some(right(ext)) {
                 ret @spanned(lo, ext.span.hi, stmt_expr(ext, self.get_id()));
@@ -1685,14 +1686,15 @@ class parser {
         ret blk;
     }
 
-    fn parse_inner_attrs_and_block(parse_attrs: bool) -> ([attribute], blk) {
+    fn parse_inner_attrs_and_block(parse_attrs: bool)
+        -> ([attribute]/~, blk) {
 
         fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) ->
-            {inner: [attribute], next: [attribute]} {
+            {inner: [attribute]/~, next: [attribute]/~} {
             if parse_attrs {
                 p.parse_inner_attrs_and_next()
             } else {
-                {inner: [], next: []}
+                {inner: []/~, next: []/~}
             }
         }
 
@@ -1727,12 +1729,12 @@ class parser {
     // necessary, and this should take a qualifier.
     // some blocks start with "#{"...
     fn parse_block_tail(lo: uint, s: blk_check_mode) -> blk {
-        self.parse_block_tail_(lo, s, [])
+        self.parse_block_tail_(lo, s, []/~)
     }
 
     fn parse_block_tail_(lo: uint, s: blk_check_mode,
-                         +first_item_attrs: [attribute]) -> blk {
-        let mut stmts = [];
+                         +first_item_attrs: [attribute]/~) -> blk {
+        let mut stmts = []/~;
         let mut expr = none;
         let {attrs_remaining, view_items} =
             self.parse_view(first_item_attrs, true);
@@ -1749,13 +1751,14 @@ class parser {
               }
               _ {
                 let stmt = self.parse_stmt(initial_attrs);
-                initial_attrs = [];
+                initial_attrs = []/~;
                 alt stmt.node {
                   stmt_expr(e, stmt_id) { // Expression without semicolon:
                     alt self.token {
                       token::SEMI {
                         self.bump();
-                        stmts += [@{node: stmt_semi(e, stmt_id) with *stmt}];
+                        push(stmts,
+                             @{node: stmt_semi(e, stmt_id) with *stmt});
                       }
                       token::RBRACE {
                         expr = some(e);
@@ -1766,13 +1769,13 @@ class parser {
                                         but found '"
                                        + token_to_str(self.reader, t) + "'");
                         }
-                        stmts += [stmt];
+                        stmts += [stmt]/~;
                       }
                     }
                   }
 
                   _ { // All other kinds of statements:
-                    stmts += [stmt];
+                    stmts += [stmt]/~;
 
                     if classify::stmt_ends_with_semi(*stmt) {
                         self.expect(token::SEMI);
@@ -1790,30 +1793,32 @@ class parser {
     }
 
     fn parse_ty_param() -> ty_param {
-        let mut bounds = [];
+        let mut bounds = []/~;
         let ident = self.parse_ident();
         if self.eat(token::COLON) {
             while self.token != token::COMMA && self.token != token::GT {
-                if self.eat_keyword("send") { bounds += [bound_send]; }
-                else if self.eat_keyword("copy") { bounds += [bound_copy]; }
-                else if self.eat_keyword("const") { bounds += [bound_const]; }
-                else { bounds += [bound_iface(self.parse_ty(false))]; }
+                if self.eat_keyword("send") { push(bounds, bound_send); }
+                else if self.eat_keyword("copy") { push(bounds, bound_copy) }
+                else if self.eat_keyword("const") {
+                    push(bounds, bound_const)
+                }
+                else { push(bounds, bound_iface(self.parse_ty(false))); }
             }
         }
         ret {ident: ident, id: self.get_id(), bounds: @bounds};
     }
 
-    fn parse_ty_params() -> [ty_param] {
+    fn parse_ty_params() -> [ty_param]/~ {
         if self.eat(token::LT) {
             self.parse_seq_to_gt(some(token::COMMA), {|p| p.parse_ty_param()})
-        } else { [] }
+        } else { []/~ }
     }
 
     fn parse_fn_decl(purity: purity,
                      parse_arg_fn: fn(parser) -> arg_or_capture_item)
         -> (fn_decl, capture_clause) {
 
-        let args_or_capture_items: [arg_or_capture_item] =
+        let args_or_capture_items: [arg_or_capture_item]/~ =
             self.parse_unspanned_seq(
                 token::LPAREN, token::RPAREN,
                 seq_sep_trailing_disallowed(token::COMMA), parse_arg_fn);
@@ -1824,7 +1829,7 @@ class parser {
         // Use the args list to translate each bound variable
         // mentioned in a constraint to an arg index.
         // Seems weird to do this in the parser, but I'm not sure how else to.
-        let mut constrs = [];
+        let mut constrs = []/~;
         if self.token == token::COLON {
             self.bump();
             constrs = self.parse_constrs({|p| p.parse_ty_constr(inputs) });
@@ -1840,7 +1845,7 @@ class parser {
     fn parse_fn_block_decl() -> (fn_decl, capture_clause) {
         let inputs_captures = {
             if self.eat(token::OROR) {
-                []
+                []/~
             } else {
                 self.parse_unspanned_seq(
                     token::BINOP(token::OR), token::BINOP(token::OR),
@@ -1857,11 +1862,11 @@ class parser {
               output: output,
               purity: impure_fn,
               cf: return_val,
-              constraints: []},
+              constraints: []/~},
              @either::rights(inputs_captures));
     }
 
-    fn parse_fn_header() -> {ident: ident, tps: [ty_param]} {
+    fn parse_fn_header() -> {ident: ident, tps: [ty_param]/~} {
         let id = self.parse_value_ident();
         let ty_params = self.parse_ty_params();
         ret {ident: id, tps: ty_params};
@@ -1869,7 +1874,7 @@ class parser {
 
     fn mk_item(lo: uint, hi: uint, +ident: ident,
                +node: item_, vis: visibility,
-               +attrs: [attribute]) -> @item {
+               +attrs: [attribute]/~) -> @item {
         ret @{ident: ident,
               attrs: attrs,
               id: self.get_id(),
@@ -1922,9 +1927,9 @@ class parser {
     }
 
     // Parses three variants (with the region/type params always optional):
-    //    impl /&<T: copy> of to_str for [T] { ... }
-    //    impl name/&<T> of to_str for [T] { ... }
-    //    impl name/&<T> for [T] { ... }
+    //    impl /&<T: copy> of to_str for [T]/~ { ... }
+    //    impl name/&<T> of to_str for [T]/~ { ... }
+    //    impl name/&<T> for [T]/~ { ... }
     fn parse_item_impl() -> item_info {
         fn wrap_path(p: parser, pt: @path) -> @ty {
             @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span}
@@ -1936,7 +1941,7 @@ class parser {
                 (none, self.parse_region_param(), self.parse_ty_params())
             }
             else if self.is_keyword("of") {
-                (none, rp_none, [])
+                (none, rp_none, []/~)
             } else {
                 let id = self.parse_ident();
                 let rp = self.parse_region_param();
@@ -1956,10 +1961,10 @@ class parser {
         };
         self.expect_keyword("for");
         let ty = self.parse_ty(false);
-        let mut meths = [];
+        let mut meths = []/~;
         self.expect(token::LBRACE);
         while !self.eat(token::RBRACE) {
-            meths += [self.parse_method(public)];
+            meths += [self.parse_method(public)]/~;
         }
         (ident, item_impl(tps, rp, ifce, ty, meths), none)
     }
@@ -1969,7 +1974,7 @@ class parser {
     // the return type of the ctor function.
     fn ident_to_path_tys(i: ident,
                          rp: region_param,
-                         typarams: [ty_param]) -> @path {
+                         typarams: [ty_param]/~) -> @path {
         let s = self.last_span;
 
         // Hack.  But then, this whole function is in service of a hack.
@@ -1978,7 +1983,7 @@ class parser {
           rp_self { some(self.region_from_name(some(@"self"))) }
         };
 
-        @{span: s, global: false, idents: [i],
+        @{span: s, global: false, idents: [i]/~,
           rp: a_r,
           types: vec::map(typarams, {|tp|
               @{id: self.get_id(),
@@ -1992,7 +1997,7 @@ class parser {
           id: self.get_id()}
     }
 
-    fn parse_iface_ref_list() -> [@iface_ref] {
+    fn parse_iface_ref_list() -> [@iface_ref]/~ {
         self.parse_seq_to_before_end(
             token::LBRACE, seq_sep_trailing_disallowed(token::COMMA),
             {|p| p.parse_iface_ref()})
@@ -2003,11 +2008,11 @@ class parser {
         let rp = self.parse_region_param();
         let ty_params = self.parse_ty_params();
         let class_path = self.ident_to_path_tys(class_name, rp, ty_params);
-        let ifaces : [@iface_ref] = if self.eat(token::COLON)
+        let ifaces : [@iface_ref]/~ = if self.eat(token::COLON)
             { self.parse_iface_ref_list() }
-        else { [] };
+        else { []/~ };
         self.expect(token::LBRACE);
-        let mut ms: [@class_member] = [];
+        let mut ms: [@class_member]/~ = []/~;
         let ctor_id = self.get_id();
         let mut the_ctor : option<(fn_decl, blk, codemap::span)> = none;
         let mut the_dtor : option<(blk, codemap::span)> = none;
@@ -2092,16 +2097,16 @@ class parser {
         }
         else if self.eat_keyword("priv") {
             self.expect(token::LBRACE);
-            let mut results = [];
+            let mut results = []/~;
             while self.token != token::RBRACE {
-                results += [self.parse_single_class_item(private)];
+                results += [self.parse_single_class_item(private)]/~;
             }
             self.bump();
             ret members(results);
         }
         else {
             // Probably need to parse attrs
-            ret members([self.parse_single_class_item(public)]);
+            ret members([self.parse_single_class_item(public)]/~);
         }
     }
 
@@ -2112,11 +2117,11 @@ class parser {
     }
 
     fn parse_mod_items(term: token::token,
-                       +first_item_attrs: [attribute]) -> _mod {
+                       +first_item_attrs: [attribute]/~) -> _mod {
         // Shouldn't be any view items since we've already parsed an item attr
         let {attrs_remaining, view_items} =
             self.parse_view(first_item_attrs, false);
-        let mut items: [@item] = [];
+        let mut items: [@item]/~ = []/~;
         let mut first = true;
         while self.token != term {
             let mut attrs = self.parse_outer_attributes();
@@ -2124,7 +2129,7 @@ class parser {
             #debug["parse_mod_items: parse_item(attrs=%?)", attrs];
             let vis = self.parse_visibility(private);
             alt self.parse_item(attrs, vis) {
-              some(i) { items += [i]; }
+              some(i) { items += [i]/~; }
               _ {
                 self.fatal("expected item but found '" +
                            token_to_str(self.reader, self.token) + "'");
@@ -2160,7 +2165,7 @@ class parser {
         (id, item_mod(m), some(inner_attrs.inner))
     }
 
-    fn parse_item_native_fn(+attrs: [attribute],
+    fn parse_item_native_fn(+attrs: [attribute]/~,
                             purity: purity) -> @native_item {
         let lo = self.last_span.lo;
         let t = self.parse_fn_header();
@@ -2186,22 +2191,22 @@ class parser {
         else { self.unexpected(); }
     }
 
-    fn parse_native_item(+attrs: [attribute]) ->
+    fn parse_native_item(+attrs: [attribute]/~) ->
         @native_item {
         self.parse_item_native_fn(attrs, self.parse_fn_purity())
     }
 
-    fn parse_native_mod_items(+first_item_attrs: [attribute]) ->
+    fn parse_native_mod_items(+first_item_attrs: [attribute]/~) ->
         native_mod {
         // Shouldn't be any view items since we've already parsed an item attr
         let {attrs_remaining, view_items} =
             self.parse_view(first_item_attrs, false);
-        let mut items: [@native_item] = [];
+        let mut items: [@native_item]/~ = []/~;
         let mut initial_attrs = attrs_remaining;
         while self.token != token::RBRACE {
             let attrs = initial_attrs + self.parse_outer_attributes();
-            initial_attrs = [];
-            items += [self.parse_native_item(attrs)];
+            initial_attrs = []/~;
+            items += [self.parse_native_item(attrs)]/~;
         }
         ret {view_items: view_items,
              items: items};
@@ -2246,7 +2251,7 @@ class parser {
         let id = self.parse_ident();
         let rp = self.parse_region_param();
         let ty_params = self.parse_ty_params();
-        let mut variants: [variant] = [];
+        let mut variants: [variant]/~ = []/~;
         // Newtype syntax
         if self.token == token::EQ {
             self.check_restricted_keywords_(*id);
@@ -2256,12 +2261,12 @@ class parser {
             let variant =
                 spanned(ty.span.lo, ty.span.hi,
                         {name: id,
-                         attrs: [],
-                         args: [{ty: ty, id: self.get_id()}],
+                         attrs: []/~,
+                         args: [{ty: ty, id: self.get_id()}]/~,
                          id: self.get_id(),
                          disr_expr: none,
                          vis: public});
-            ret (id, item_enum([variant], ty_params, rp), none);
+            ret (id, item_enum([variant]/~, ty_params, rp), none);
         }
         self.expect(token::LBRACE);
 
@@ -2272,7 +2277,7 @@ class parser {
             let vlo = self.span.lo;
             let vis = self.parse_visibility(default_vis);
             let ident = self.parse_value_ident();
-            let mut args = [], disr_expr = none;
+            let mut args = []/~, disr_expr = none;
             if self.token == token::LPAREN {
                 all_nullary = false;
                 let arg_tys = self.parse_unspanned_seq(
@@ -2280,7 +2285,7 @@ class parser {
                     seq_sep_trailing_disallowed(token::COMMA),
                     {|p| p.parse_ty(false)});
                 for arg_tys.each {|ty|
-                    args += [{ty: ty, id: self.get_id()}];
+                    args += [{ty: ty, id: self.get_id()}]/~;
                 }
             } else if self.eat(token::EQ) {
                 have_disr = true;
@@ -2290,7 +2295,7 @@ class parser {
             let vr = {name: ident, attrs: variant_attrs,
                       args: args, id: self.get_id(),
                       disr_expr: disr_expr, vis: vis};
-            variants += [spanned(vlo, self.last_span.hi, vr)];
+            variants += [spanned(vlo, self.last_span.hi, vr)]/~;
 
             if !self.eat(token::COMMA) { break; }
         }
@@ -2333,7 +2338,7 @@ class parser {
         }
     }
 
-    fn parse_item(+attrs: [attribute], vis: visibility)
+    fn parse_item(+attrs: [attribute]/~, vis: visibility)
         -> option<@item> {
         let lo = self.span.lo;
         let (ident, item_, extra_attrs) = if self.eat_keyword("const") {
@@ -2384,20 +2389,20 @@ class parser {
     fn parse_view_path() -> @view_path {
         let lo = self.span.lo;
         let first_ident = self.parse_ident();
-        let mut path = [first_ident];
+        let mut path = [first_ident]/~;
         #debug("parsed view_path: %s", *first_ident);
         alt self.token {
           token::EQ {
             // x = foo::bar
             self.bump();
-            path = [self.parse_ident()];
+            path = [self.parse_ident()]/~;
             while self.token == token::MOD_SEP {
                 self.bump();
                 let id = self.parse_ident();
-                path += [id];
+                path += [id]/~;
             }
             let path = @{span: mk_sp(lo, self.span.hi), global: false,
-                         idents: path, rp: none, types: []};
+                         idents: path, rp: none, types: []/~};
             ret @spanned(lo, self.span.hi,
                          view_path_simple(first_ident, path, self.get_id()));
           }
@@ -2411,7 +2416,7 @@ class parser {
 
                   token::IDENT(i, _) {
                     self.bump();
-                    path += [self.get_str(i)];
+                    path += [self.get_str(i)]/~;
                   }
 
                   // foo::bar::{a,b,c}
@@ -2422,7 +2427,7 @@ class parser {
                         {|p| p.parse_path_list_ident()});
                     let path = @{span: mk_sp(lo, self.span.hi),
                                  global: false, idents: path,
-                                 rp: none, types: []};
+                                 rp: none, types: []/~};
                     ret @spanned(lo, self.span.hi,
                                  view_path_list(path, idents, self.get_id()));
                   }
@@ -2432,7 +2437,7 @@ class parser {
                     self.bump();
                     let path = @{span: mk_sp(lo, self.span.hi),
                                  global: false, idents: path,
-                                 rp: none, types: []};
+                                 rp: none, types: []/~};
                     ret @spanned(lo, self.span.hi,
                                  view_path_glob(path, self.get_id()));
                   }
@@ -2445,16 +2450,16 @@ class parser {
         }
         let last = path[vec::len(path) - 1u];
         let path = @{span: mk_sp(lo, self.span.hi), global: false,
-                     idents: path, rp: none, types: []};
+                     idents: path, rp: none, types: []/~};
         ret @spanned(lo, self.span.hi,
                      view_path_simple(last, path, self.get_id()));
     }
 
-    fn parse_view_paths() -> [@view_path] {
-        let mut vp = [self.parse_view_path()];
+    fn parse_view_paths() -> [@view_path]/~ {
+        let mut vp = [self.parse_view_path()]/~;
         while self.token == token::COMMA {
             self.bump();
-            vp += [self.parse_view_path()];
+            vp += [self.parse_view_path()]/~;
         }
         ret vp;
     }
@@ -2468,7 +2473,7 @@ class parser {
             || self.token_is_keyword("export", tok)
     }
 
-    fn parse_view_item(+attrs: [attribute]) -> @view_item {
+    fn parse_view_item(+attrs: [attribute]/~) -> @view_item {
         let lo = self.span.lo, vis = self.parse_visibility(private);
         let node = if self.eat_keyword("use") {
             self.parse_use()
@@ -2482,14 +2487,14 @@ class parser {
           vis: vis, span: mk_sp(lo, self.last_span.hi)}
     }
 
-    fn parse_view(+first_item_attrs: [attribute],
-                  only_imports: bool) -> {attrs_remaining: [attribute],
-                                          view_items: [@view_item]} {
+    fn parse_view(+first_item_attrs: [attribute]/~,
+                  only_imports: bool) -> {attrs_remaining: [attribute]/~,
+                                          view_items: [@view_item]/~} {
         let mut attrs = first_item_attrs + self.parse_outer_attributes();
-        let mut items = [];
+        let mut items = []/~;
         while if only_imports { self.is_keyword("import") }
         else { self.is_view_item() } {
-            items += [self.parse_view_item(attrs)];
+            items += [self.parse_view_item(attrs)]/~;
             attrs = self.parse_outer_attributes();
         }
         {attrs_remaining: attrs, view_items: items}
@@ -2502,7 +2507,7 @@ class parser {
         let first_item_outer_attrs = crate_attrs.next;
         let m = self.parse_mod_items(token::EOF, first_item_outer_attrs);
         ret @spanned(lo, self.span.lo,
-                     {directives: [],
+                     {directives: []/~,
                       module: m,
                       attrs: crate_attrs.inner,
                       config: self.cfg});
@@ -2523,7 +2528,7 @@ class parser {
     //
     // Each directive imperatively extends its environment with 0 or more
     // items.
-    fn parse_crate_directive(first_outer_attr: [attribute]) ->
+    fn parse_crate_directive(first_outer_attr: [attribute]/~) ->
         crate_directive {
 
         // Collect the next attributes
@@ -2564,8 +2569,8 @@ class parser {
     }
 
     fn parse_crate_directives(term: token::token,
-                              first_outer_attr: [attribute]) ->
-        [@crate_directive] {
+                              first_outer_attr: [attribute]/~) ->
+        [@crate_directive]/~ {
 
         // This is pretty ugly. If we have an outer attribute then we can't
         // accept seeing the terminator next, so if we do see it then fail the
@@ -2574,12 +2579,12 @@ class parser {
             self.expect_keyword("mod");
         }
 
-        let mut cdirs: [@crate_directive] = [];
+        let mut cdirs: [@crate_directive]/~ = []/~;
         let mut first_outer_attr = first_outer_attr;
         while self.token != term {
             let cdir = @self.parse_crate_directive(first_outer_attr);
-            cdirs += [cdir];
-            first_outer_attr = [];
+            cdirs += [cdir]/~;
+            first_outer_attr = []/~;
         }
         ret cdirs;
     }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 9d6427912df..feffbd4020c 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -260,7 +260,7 @@ fn contextual_keyword_table() -> hashmap<str, ()> {
         "with",
         /* temp */
         "sep", "many", "at_least_one", "parse"
-    ];
+    ]/~;
     for keys.each {|word|
         words.insert(word, ());
     }
@@ -298,7 +298,7 @@ fn restricted_keyword_table() -> hashmap<str, ()> {
         "true", "trait", "type",
         "unchecked", "unsafe",
         "while"
-    ];
+    ]/~;
     for keys.each {|word|
         words.insert(word, ());
     }