Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/attr.rs      |   2
-rw-r--r--  src/libsyntax/parse/common.rs    |  94
-rw-r--r--  src/libsyntax/parse/eval.rs      |   2
-rw-r--r--  src/libsyntax/parse/lexer.rs     |  26
-rw-r--r--  src/libsyntax/parse/obsolete.rs  |  75
-rw-r--r--  src/libsyntax/parse/parser.rs    | 302
-rw-r--r--  src/libsyntax/parse/prec.rs      |   4
-rw-r--r--  src/libsyntax/parse/token.rs     |  90
8 files changed, 306 insertions, 289 deletions
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 9be4909814b..42101a431d6 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -23,7 +23,7 @@ trait parser_attr {
     fn parse_optional_meta() -> ~[@ast::meta_item];
 }
 
-impl parser: parser_attr {
+impl Parser: parser_attr {
 
     fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
         -> attr_or_ext
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index c8c30ee7fa9..50c22c08f4f 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -1,63 +1,63 @@
 use std::map::{HashMap};
 use ast_util::spanned;
-use parser::parser;
+use parser::Parser;
 use lexer::reader;
 
 type seq_sep = {
-    sep: Option<token::token>,
+    sep: Option<token::Token>,
     trailing_sep_allowed: bool
 };
 
-fn seq_sep_trailing_disallowed(t: token::token) -> seq_sep {
+fn seq_sep_trailing_disallowed(t: token::Token) -> seq_sep {
     return {sep: option::Some(t), trailing_sep_allowed: false};
 }
-fn seq_sep_trailing_allowed(t: token::token) -> seq_sep {
+fn seq_sep_trailing_allowed(t: token::Token) -> seq_sep {
     return {sep: option::Some(t), trailing_sep_allowed: true};
 }
 fn seq_sep_none() -> seq_sep {
     return {sep: option::None, trailing_sep_allowed: false};
 }
 
-fn token_to_str(reader: reader, ++token: token::token) -> ~str {
+fn token_to_str(reader: reader, ++token: token::Token) -> ~str {
     token::to_str(reader.interner(), token)
 }
 
 trait parser_common {
-    fn unexpected_last(t: token::token) -> !;
+    fn unexpected_last(t: token::Token) -> !;
     fn unexpected() -> !;
-    fn expect(t: token::token);
+    fn expect(t: token::Token);
     fn parse_ident() -> ast::ident;
     fn parse_path_list_ident() -> ast::path_list_ident;
     fn parse_value_ident() -> ast::ident;
-    fn eat(tok: token::token) -> bool;
+    fn eat(tok: token::Token) -> bool;
     // A sanity check that the word we are asking for is a known keyword
     fn require_keyword(word: ~str);
-    fn token_is_keyword(word: ~str, ++tok: token::token) -> bool;
+    fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool;
     fn is_keyword(word: ~str) -> bool;
-    fn is_any_keyword(tok: token::token) -> bool;
+    fn is_any_keyword(tok: token::Token) -> bool;
     fn eat_keyword(word: ~str) -> bool;
     fn expect_keyword(word: ~str);
     fn expect_gt();
-    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::token>,
-                                       f: fn(parser) -> T) -> ~[T];
-    fn parse_seq_to_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> ~[T];
-    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> spanned<~[T]>;
-    fn parse_seq_to_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                 f: fn(parser) -> T) -> ~[T];
-    fn parse_seq_to_before_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                        f: fn(parser) -> T) -> ~[T];
-    fn parse_unspanned_seq<T: Copy>(bra: token::token,
-                                    ket: token::token,
+    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::Token>,
+                                       f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq_to_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> spanned<~[T]>;
+    fn parse_seq_to_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                 f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq_to_before_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                        f: fn(Parser) -> T) -> ~[T];
+    fn parse_unspanned_seq<T: Copy>(bra: token::Token,
+                                    ket: token::Token,
                                     sep: seq_sep,
-                                    f: fn(parser) -> T) -> ~[T];
-    fn parse_seq<T: Copy>(bra: token::token, ket: token::token, sep: seq_sep,
-                          f: fn(parser) -> T) -> spanned<~[T]>;
+                                    f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq<T: Copy>(bra: token::Token, ket: token::Token, sep: seq_sep,
+                          f: fn(Parser) -> T) -> spanned<~[T]>;
 }
 
-impl parser: parser_common {
-    fn unexpected_last(t: token::token) -> ! {
+impl Parser: parser_common {
+    fn unexpected_last(t: token::Token) -> ! {
         self.span_fatal(
             copy self.last_span,
             ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`");
@@ -68,7 +68,7 @@ impl parser: parser_common {
                    + token_to_str(self.reader, self.token) + ~"`");
     }
 
-    fn expect(t: token::token) {
+    fn expect(t: token::Token) {
         if self.token == t {
             self.bump();
         } else {
@@ -104,7 +104,7 @@ impl parser: parser_common {
         return self.parse_ident();
     }
 
-    fn eat(tok: token::token) -> bool {
+    fn eat(tok: token::Token) -> bool {
         return if self.token == tok { self.bump(); true } else { false };
     }
 
@@ -117,14 +117,14 @@ impl parser: parser_common {
         }
     }
 
-    fn token_is_word(word: ~str, ++tok: token::token) -> bool {
+    fn token_is_word(word: ~str, ++tok: token::Token) -> bool {
         match tok {
           token::IDENT(sid, false) => { *self.id_to_str(sid) == word }
           _ => { false }
         }
     }
 
-    fn token_is_keyword(word: ~str, ++tok: token::token) -> bool {
+    fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool {
         self.require_keyword(word);
         self.token_is_word(word, tok)
     }
@@ -133,7 +133,7 @@ impl parser: parser_common {
         self.token_is_keyword(word, self.token)
     }
 
-    fn is_any_keyword(tok: token::token) -> bool {
+    fn is_any_keyword(tok: token::Token) -> bool {
         match tok {
           token::IDENT(sid, false) => {
             self.keywords.contains_key_ref(self.id_to_str(sid))
@@ -216,8 +216,8 @@ impl parser: parser_common {
         }
     }
 
-    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::token>,
-                                       f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::Token>,
+                                       f: fn(Parser) -> T) -> ~[T] {
         let mut first = true;
         let mut v = ~[];
         while self.token != token::GT
@@ -235,16 +235,16 @@ impl parser: parser_common {
         return v;
     }
 
-    fn parse_seq_to_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> ~[T] {
         let v = self.parse_seq_to_before_gt(sep, f);
         self.expect_gt();
 
         return v;
     }
 
-    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> spanned<~[T]> {
+    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> spanned<~[T]> {
         let lo = self.span.lo;
         self.expect(token::LT);
         let result = self.parse_seq_to_before_gt::<T>(sep, f);
@@ -253,16 +253,16 @@ impl parser: parser_common {
         return spanned(lo, hi, result);
     }
 
-    fn parse_seq_to_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                 f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                 f: fn(Parser) -> T) -> ~[T] {
         let val = self.parse_seq_to_before_end(ket, sep, f);
         self.bump();
         return val;
     }
 
 
-    fn parse_seq_to_before_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                        f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_before_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                        f: fn(Parser) -> T) -> ~[T] {
         let mut first: bool = true;
         let mut v: ~[T] = ~[];
         while self.token != ket {
@@ -279,10 +279,10 @@ impl parser: parser_common {
         return v;
     }
 
-    fn parse_unspanned_seq<T: Copy>(bra: token::token,
-                                    ket: token::token,
+    fn parse_unspanned_seq<T: Copy>(bra: token::Token,
+                                    ket: token::Token,
                                     sep: seq_sep,
-                                    f: fn(parser) -> T) -> ~[T] {
+                                    f: fn(Parser) -> T) -> ~[T] {
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
         self.bump();
@@ -291,8 +291,8 @@ impl parser: parser_common {
 
     // NB: Do not use this function unless you actually plan to place the
     // spanned list in the AST.
-    fn parse_seq<T: Copy>(bra: token::token, ket: token::token, sep: seq_sep,
-                          f: fn(parser) -> T) -> spanned<~[T]> {
+    fn parse_seq<T: Copy>(bra: token::Token, ket: token::Token, sep: seq_sep,
+                          f: fn(Parser) -> T) -> spanned<~[T]> {
         let lo = self.span.lo;
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
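
The common.rs hunks above only rename the types these helpers traffic in, but the shape of the API is visible in the trait: a seq_sep describes an optional separator token and whether a trailing separator is allowed, and parse_seq_to_before_end consumes elements up to, but not including, a closing token. A minimal sketch of that pattern in present-day Rust; the Token, SeqSep, and Parser types here are illustrative stand-ins, not the historical ones:

    // Sketch of the "parse a separated sequence up to a closing token"
    // helper that common.rs defines (parse_seq_to_before_end and friends).

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Token { Comma, Gt, Ident(char), Eof }

    struct SeqSep {
        sep: Option<Token>,
        trailing_sep_allowed: bool,
    }

    struct Parser {
        tokens: Vec<Token>,
        pos: usize,
    }

    impl Parser {
        fn token(&self) -> Token {
            *self.tokens.get(self.pos).unwrap_or(&Token::Eof)
        }
        fn bump(&mut self) {
            self.pos += 1;
        }
        fn eat(&mut self, t: Token) -> bool {
            if self.token() == t { self.bump(); true } else { false }
        }

        // Parse `f`-shaped elements separated by `sep.sep` until `ket` is the
        // current token; the closing token itself is left for the caller,
        // mirroring parse_seq_to_before_end in the patch.
        fn parse_seq_to_before_end<T>(
            &mut self,
            ket: Token,
            sep: SeqSep,
            f: impl Fn(&mut Parser) -> T,
        ) -> Vec<T> {
            let mut first = true;
            let mut v = Vec::new();
            while self.token() != ket {
                if let Some(s) = sep.sep {
                    if first {
                        first = false;
                    } else {
                        assert!(self.eat(s), "expected separator");
                    }
                }
                if sep.trailing_sep_allowed && self.token() == ket { break; }
                v.push(f(self));
            }
            v
        }
    }

    fn main() {
        // "a, b, c>" parsed up to (not including) `>`
        let mut p = Parser {
            tokens: vec![
                Token::Ident('a'), Token::Comma,
                Token::Ident('b'), Token::Comma,
                Token::Ident('c'), Token::Gt,
            ],
            pos: 0,
        };
        let sep = SeqSep { sep: Some(Token::Comma), trailing_sep_allowed: true };
        let idents = p.parse_seq_to_before_end(Token::Gt, sep, |p| {
            let t = p.token();
            p.bump();
            t
        });
        assert_eq!(idents.len(), 3);
        assert_eq!(p.token(), Token::Gt); // closing token not consumed
    }
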
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index c9106028491..56c9d4de9f3 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -1,4 +1,4 @@
-use parser::{parser, SOURCE_FILE};
+use parser::{Parser, SOURCE_FILE};
 use attr::parser_attr;
 
 export eval_crate_directives_to_mod;
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 06fcc1cf958..8f57d733eb5 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -10,11 +10,11 @@ export string_reader_as_reader, tt_reader_as_reader;
 
 trait reader {
     fn is_eof() -> bool;
-    fn next_token() -> {tok: token::token, sp: span};
+    fn next_token() -> {tok: token::Token, sp: span};
     fn fatal(~str) -> !;
     fn span_diag() -> span_handler;
     pure fn interner() -> @token::ident_interner;
-    fn peek() -> {tok: token::token, sp: span};
+    fn peek() -> {tok: token::Token, sp: span};
     fn dup() -> reader;
 }
 
@@ -28,7 +28,7 @@ type string_reader = @{
     filemap: codemap::filemap,
     interner: @token::ident_interner,
     /* cached: */
-    mut peek_tok: token::token,
+    mut peek_tok: token::Token,
     mut peek_span: span
 };
 
@@ -69,7 +69,7 @@ fn dup_string_reader(&&r: string_reader) -> string_reader {
 
 impl string_reader: reader {
     fn is_eof() -> bool { is_eof(self) }
-    fn next_token() -> {tok: token::token, sp: span} {
+    fn next_token() -> {tok: token::Token, sp: span} {
         let ret_val = {tok: self.peek_tok, sp: self.peek_span};
         string_advance_token(self);
         return ret_val;
@@ -79,7 +79,7 @@ impl string_reader: reader {
     }
     fn span_diag() -> span_handler { self.span_diagnostic }
     pure fn interner() -> @token::ident_interner { self.interner }
-    fn peek() -> {tok: token::token, sp: span} {
+    fn peek() -> {tok: token::Token, sp: span} {
         {tok: self.peek_tok, sp: self.peek_span}
     }
     fn dup() -> reader { dup_string_reader(self) as reader }
@@ -87,7 +87,7 @@ impl string_reader: reader {
 
 impl tt_reader: reader {
     fn is_eof() -> bool { self.cur_tok == token::EOF }
-    fn next_token() -> {tok: token::token, sp: span} {
+    fn next_token() -> {tok: token::Token, sp: span} {
         /* weird resolve bug: if the following `if`, or any of its
         statements are removed, we get resolution errors */
         if false {
@@ -101,7 +101,7 @@ impl tt_reader: reader {
     }
     fn span_diag() -> span_handler { self.sp_diag }
     pure fn interner() -> @token::ident_interner { self.interner }
-    fn peek() -> {tok: token::token, sp: span} {
+    fn peek() -> {tok: token::Token, sp: span} {
         { tok: self.cur_tok, sp: self.cur_span }
     }
     fn dup() -> reader { dup_tt_reader(self) as reader }
@@ -196,14 +196,14 @@ fn is_bin_digit(c: char) -> bool { return c == '0' || c == '1'; }
 
 // might return a sugared-doc-attr
 fn consume_whitespace_and_comments(rdr: string_reader)
-                                -> Option<{tok: token::token, sp: span}> {
+                                -> Option<{tok: token::Token, sp: span}> {
     while is_whitespace(rdr.curr) { bump(rdr); }
     return consume_any_line_comment(rdr);
 }
 
 // might return a sugared-doc-attr
 fn consume_any_line_comment(rdr: string_reader)
-                                -> Option<{tok: token::token, sp: span}> {
+                                -> Option<{tok: token::Token, sp: span}> {
     if rdr.curr == '/' {
         match nextch(rdr) {
           '/' => {
@@ -246,7 +246,7 @@ fn consume_any_line_comment(rdr: string_reader)
 
 // might return a sugared-doc-attr
 fn consume_block_comment(rdr: string_reader)
-                                -> Option<{tok: token::token, sp: span}> {
+                                -> Option<{tok: token::Token, sp: span}> {
 
     // block comments starting with "/**" or "/*!" are doc-comments
     if rdr.curr == '*' || rdr.curr == '!' {
@@ -317,7 +317,7 @@ fn scan_digits(rdr: string_reader, radix: uint) -> ~str {
     };
 }
 
-fn scan_number(c: char, rdr: string_reader) -> token::token {
+fn scan_number(c: char, rdr: string_reader) -> token::Token {
     let mut num_str, base = 10u, c = c, n = nextch(rdr);
     if c == '0' && n == 'x' {
         bump(rdr);
@@ -435,7 +435,7 @@ fn scan_numeric_escape(rdr: string_reader, n_hex_digits: uint) -> char {
     return accum_int as char;
 }
 
-fn next_token_inner(rdr: string_reader) -> token::token {
+fn next_token_inner(rdr: string_reader) -> token::Token {
     let mut accum_str = ~"";
     let mut c = rdr.curr;
     if (c >= 'a' && c <= 'z')
@@ -460,7 +460,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
     if is_dec_digit(c) {
         return scan_number(c, rdr);
     }
-    fn binop(rdr: string_reader, op: token::binop) -> token::token {
+    fn binop(rdr: string_reader, op: token::binop) -> token::Token {
         bump(rdr);
         if rdr.curr == '=' {
             bump(rdr);
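
lexer.rs only picks up the token::Token rename, but the functions it touches carry a detail worth spelling out: consume_whitespace_and_comments and consume_any_line_comment "might return a sugared-doc-attr", i.e. doc comments survive as tokens while ordinary comments are skipped. A small present-day Rust sketch of that behavior for line comments only; the string-slicing helper and Tok type are invented for the example:

    // Skip whitespace and plain `//` comments, but surface `///` and `//!`
    // as doc-comment tokens, like consume_whitespace_and_comments does.

    #[derive(Debug, PartialEq)]
    enum Tok {
        DocComment(String),
    }

    fn consume_whitespace_and_comments(src: &str) -> (Option<Tok>, &str) {
        let mut rest = src;
        loop {
            rest = rest.trim_start();
            if let Some(r) = rest.strip_prefix("//") {
                // `///` and `//!` are sugared doc-attributes and become tokens;
                // ordinary `//` comments are skipped.
                let (line, tail) = r.split_once('\n').unwrap_or((r, ""));
                if line.starts_with('/') || line.starts_with('!') {
                    return (Some(Tok::DocComment(format!("//{line}"))), tail);
                }
                rest = tail;
            } else {
                return (None, rest);
            }
        }
    }

    fn main() {
        let (tok, rest) = consume_whitespace_and_comments("  // plain\n  /// doc\nfn");
        assert_eq!(tok, Some(Tok::DocComment("/// doc".into())));
        assert_eq!(rest, "fn");
    }
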
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index 828d498ca3c..1f607d849d9 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -10,7 +10,7 @@ removed.
 use codemap::span;
 use ast::{expr, expr_lit, lit_nil};
 use ast_util::{respan};
-use token::token;
+use token::Token;
 
 /// The specific types of unsupported syntax
 pub enum ObsoleteSyntax {
@@ -23,7 +23,8 @@ pub enum ObsoleteSyntax {
     ObsoleteClassTraits,
     ObsoletePrivSection,
     ObsoleteModeInFnType,
-    ObsoleteByMutRefMode
+    ObsoleteByMutRefMode,
+    ObsoleteFixedLengthVec,
 }
 
 impl ObsoleteSyntax : cmp::Eq {
@@ -47,7 +48,7 @@ pub trait ObsoleteReporter {
     fn obsolete_expr(sp: span, kind: ObsoleteSyntax) -> @expr;
 }
 
-impl parser : ObsoleteReporter {
+impl Parser : ObsoleteReporter {
     /// Reports an obsolete syntax non-fatal error.
     fn obsolete(sp: span, kind: ObsoleteSyntax) {
         let (kind_str, desc) = match kind {
@@ -99,6 +100,11 @@ impl parser : ObsoleteReporter {
                 "by-mutable-reference mode",
                 "Declare an argument of type &mut T instead"
             ),
+            ObsoleteFixedLengthVec => (
+                "fixed-length vector",
+                "Fixed-length types are now written `[T * N]`, and instances \
+                 are type-inferred"
+            )
         };
 
         self.report(sp, kind, kind_str, desc);
@@ -121,7 +127,7 @@ impl parser : ObsoleteReporter {
         }
     }
 
-    fn token_is_obsolete_ident(ident: &str, token: token) -> bool {
+    fn token_is_obsolete_ident(ident: &str, token: Token) -> bool {
         match token {
             token::IDENT(copy sid, _) => {
                 str::eq_slice(*self.id_to_str(sid), ident)
@@ -183,5 +189,66 @@ impl parser : ObsoleteReporter {
             false
         }
     }
+
+    fn try_parse_obsolete_fixed_vstore() -> Option<Option<uint>> {
+        if self.token == token::BINOP(token::SLASH) {
+            self.bump();
+            match copy self.token {
+                token::UNDERSCORE => {
+                    self.obsolete(copy self.last_span,
+                                  ObsoleteFixedLengthVec);
+                    self.bump(); Some(None)
+                }
+                token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => {
+                    self.obsolete(copy self.last_span,
+                                  ObsoleteFixedLengthVec);
+                    self.bump(); Some(Some(i as uint))
+                }
+                _ => None
+            }
+        } else {
+            None
+        }
+    }
+
+    fn try_convert_ty_to_obsolete_fixed_length_vstore(sp: span, t: ast::ty_)
+        -> ast::ty_ {
+        match self.try_parse_obsolete_fixed_vstore() {
+            // Consider a fixed length vstore suffix (/N or /_)
+            None => t,
+            Some(v) => {
+                ast::ty_fixed_length(
+                    @{id: self.get_id(), node: t, span: sp}, v)
+            }
+        }
+    }
+
+    fn try_convert_expr_to_obsolete_fixed_length_vstore(
+        lo: uint, hi: uint, ex: ast::expr_
+    ) -> (uint, ast::expr_) {
+
+        let mut hi = hi;
+        let mut ex = ex;
+
+        // Vstore is legal following expr_lit(lit_str(...)) and expr_vec(...)
+        // only.
+        match ex {
+            ast::expr_lit(@{node: ast::lit_str(_), span: _}) |
+            ast::expr_vec(_, _)  => {
+                match self.try_parse_obsolete_fixed_vstore() {
+                    None => (),
+                    Some(v) => {
+                        hi = self.span.hi;
+                        ex = ast::expr_vstore(self.mk_expr(lo, hi, ex),
+                                              ast::expr_vstore_fixed(v));
+                    }
+                }
+            }
+            _ => ()
+        }
+
+        return (hi, ex);
+    }
+
 }
 
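
The new obsolete.rs code keeps accepting the old `/N` and `/_` fixed-length-vector suffix but routes it through the non-fatal obsolete-syntax reporting, with the hint that the new spelling is `[T * N]`. A sketch of that "parse it anyway, report it as obsolete" flow in present-day Rust; the Token, Obsolete, and Parser types are stand-ins for the historical ones:

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Token { Slash, Underscore, LitInt(i64), Eof }

    #[derive(Debug, PartialEq)]
    enum Obsolete { FixedLengthVec }

    struct Parser {
        tokens: Vec<Token>,
        pos: usize,
        reports: Vec<Obsolete>, // non-fatal obsolete-syntax reports
    }

    impl Parser {
        fn token(&self) -> Token { *self.tokens.get(self.pos).unwrap_or(&Token::Eof) }
        fn bump(&mut self) { self.pos += 1; }
        fn obsolete(&mut self, kind: Obsolete) { self.reports.push(kind); }

        // Mirrors try_parse_obsolete_fixed_vstore: Some(Some(n)) for `/N`,
        // Some(None) for `/_`, None if no suffix is present.
        fn try_parse_obsolete_fixed_vstore(&mut self) -> Option<Option<u64>> {
            if self.token() != Token::Slash {
                return None;
            }
            self.bump();
            match self.token() {
                Token::Underscore => {
                    self.obsolete(Obsolete::FixedLengthVec);
                    self.bump();
                    Some(None)
                }
                Token::LitInt(i) if i >= 0 => {
                    self.obsolete(Obsolete::FixedLengthVec);
                    self.bump();
                    Some(Some(i as u64))
                }
                _ => None,
            }
        }
    }

    fn main() {
        // e.g. the tail of the old spelling `[int]/4`
        let mut p = Parser { tokens: vec![Token::Slash, Token::LitInt(4)], pos: 0, reports: vec![] };
        assert_eq!(p.try_parse_obsolete_fixed_vstore(), Some(Some(4)));
        assert_eq!(p.reports, vec![Obsolete::FixedLengthVec]);
    }
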
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 22c25186c91..925da063ca6 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -6,7 +6,7 @@ use std::map::HashMap;
 use token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
             INTERPOLATED, special_idents};
 use codemap::{span,fss_none};
-use util::interner::interner;
+use util::interner::Interner;
 use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
 use lexer::reader;
 use prec::{as_prec, token_to_binop};
@@ -22,10 +22,9 @@ use obsolete::{
     ObsoleteWith, ObsoleteClassMethod, ObsoleteClassTraits,
     ObsoleteModeInFnType, ObsoleteByMutRefMode
 };
-use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
+use ast::{_mod, add, arg, arm, attribute,
              bind_by_ref, bind_by_implicit_ref, bind_by_value, bind_by_move,
-             bitand, bitor, bitxor, blk, blk_check_mode, bound_const,
-             bound_copy, bound_send, bound_trait, bound_owned, box, by_copy,
+             bitand, bitor, bitxor, blk, blk_check_mode, box, by_copy,
              by_move, by_ref, by_val, capture_clause,
              capture_item, cdir_dir_mod, cdir_src_mod, cdir_view_item,
              class_immutable, class_mutable,
@@ -58,7 +57,7 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
              stmt_semi, struct_def, struct_field, struct_variant_kind,
              subtract, sty_box, sty_by_ref, sty_region, sty_static, sty_uniq,
              sty_value, token_tree, trait_method, trait_ref, tt_delim, tt_seq,
-             tt_tok, tt_nonterminal, tuple_variant_kind, ty, ty_, ty_bot,
+             tt_tok, tt_nonterminal, tuple_variant_kind, Ty, ty_, ty_bot,
              ty_box, ty_field, ty_fn, ty_infer, ty_mac, ty_method, ty_nil,
              ty_param, ty_param_bound, ty_path, ty_ptr, ty_rec, ty_rptr,
              ty_tup, ty_u32, ty_uniq, ty_vec, ty_fixed_length, type_value_ns,
@@ -71,11 +70,11 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
              expr_vstore_uniq};
 
 export file_type;
-export parser;
+export Parser;
 export CRATE_FILE;
 export SOURCE_FILE;
 
-// FIXME (#1893): #ast expects to find this here but it's actually
+// FIXME (#3726): #ast expects to find this here but it's actually
 // defined in `parse` Fixing this will be easier when we have export
 // decls on individual items -- then parse can export this publicly, and
 // everything else crate-visibly.
@@ -115,8 +114,7 @@ enum class_member {
   So that we can distinguish a class ctor or dtor
   from other class members
  */
-enum class_contents { ctor_decl(fn_decl, ~[attribute], blk, codemap::span),
-                      dtor_decl(blk, ~[attribute], codemap::span),
+enum class_contents { dtor_decl(blk, ~[attribute], codemap::span),
                       members(~[@class_member]) }
 
 type arg_or_capture_item = Either<arg, capture_item>;
@@ -125,12 +123,13 @@ type item_info = (ident, item_, Option<~[attribute]>);
 enum item_or_view_item {
     iovi_none,
     iovi_item(@item),
+    iovi_foreign_item(@foreign_item),
     iovi_view_item(@view_item)
 }
 
 enum view_item_parse_mode {
     VIEW_ITEMS_AND_ITEMS_ALLOWED,
-    VIEW_ITEMS_ALLOWED,
+    VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED,
     IMPORTS_AND_ITEMS_ALLOWED
 }
 
@@ -191,14 +190,14 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>)
 
 /* ident is handled by common.rs */
 
-fn parser(sess: parse_sess, cfg: ast::crate_cfg,
-          +rdr: reader, ftype: file_type) -> parser {
+fn Parser(sess: parse_sess, cfg: ast::crate_cfg,
+          +rdr: reader, ftype: file_type) -> Parser {
 
     let tok0 = rdr.next_token();
     let span0 = tok0.sp;
     let interner = rdr.interner();
 
-    parser {
+    Parser {
         reader: move rdr,
         interner: move interner,
         sess: sess,
@@ -207,12 +206,7 @@ fn parser(sess: parse_sess, cfg: ast::crate_cfg,
         token: tok0.tok,
         span: span0,
         last_span: span0,
-        buffer: [mut
-            {tok: tok0.tok, sp: span0},
-            {tok: tok0.tok, sp: span0},
-            {tok: tok0.tok, sp: span0},
-            {tok: tok0.tok, sp: span0}
-        ]/4,
+        buffer: [mut {tok: tok0.tok, sp: span0}, ..4],
         buffer_start: 0,
         buffer_end: 0,
         restriction: UNRESTRICTED,
@@ -224,14 +218,14 @@ fn parser(sess: parse_sess, cfg: ast::crate_cfg,
     }
 }
 
-struct parser {
+struct Parser {
     sess: parse_sess,
     cfg: crate_cfg,
     file_type: file_type,
-    mut token: token::token,
+    mut token: token::Token,
     mut span: span,
     mut last_span: span,
-    mut buffer: [mut {tok: token::token, sp: span}]/4,
+    mut buffer: [mut {tok: token::Token, sp: span} * 4],
     mut buffer_start: int,
     mut buffer_end: int,
     mut restriction: restriction,
@@ -248,7 +242,7 @@ struct parser {
     drop {} /* do not copy the parser; its state is tied to outside state */
 }
 
-impl parser {
+impl Parser {
     fn bump() {
         self.last_span = self.span;
         let next = if self.buffer_start == self.buffer_end {
@@ -261,7 +255,7 @@ impl parser {
         self.token = next.tok;
         self.span = next.sp;
     }
-    fn swap(next: token::token, lo: uint, hi: uint) {
+    fn swap(next: token::Token, lo: uint, hi: uint) {
         self.token = next;
         self.span = mk_sp(lo, hi);
     }
@@ -271,7 +265,7 @@ impl parser {
         }
         return (4 - self.buffer_start) + self.buffer_end;
     }
-    fn look_ahead(distance: uint) -> token::token {
+    fn look_ahead(distance: uint) -> token::Token {
         let dist = distance as int;
         while self.buffer_length() < dist {
             self.buffer[self.buffer_end] = self.reader.next_token();
@@ -412,7 +406,7 @@ impl parser {
         });
     }
 
-    fn parse_ret_ty() -> (ret_style, @ty) {
+    fn parse_ret_ty() -> (ret_style, @Ty) {
         return if self.eat(token::RARROW) {
             let lo = self.span.lo;
             if self.eat(token::NOT) {
@@ -473,7 +467,7 @@ impl parser {
         self.region_from_name(name)
     }
 
-    fn parse_ty(colons_before_params: bool) -> @ty {
+    fn parse_ty(colons_before_params: bool) -> @Ty {
         maybe_whole!(self, nt_ty);
 
         let lo = self.span.lo;
@@ -558,14 +552,13 @@ impl parser {
         } else { self.fatal(~"expected type"); };
 
         let sp = mk_sp(lo, self.last_span.hi);
-        return @{id: self.get_id(),
-              node: match self.maybe_parse_fixed_vstore() {
-                // Consider a fixed vstore suffix (/N or /_)
-                None => t,
-                Some(v) => {
-                  ty_fixed_length(@{id: self.get_id(), node:t, span: sp}, v)
-                } },
+        return {
+            let node =
+                self.try_convert_ty_to_obsolete_fixed_length_vstore(sp, t);
+            @{id: self.get_id(),
+              node: node,
               span: sp}
+        };
     }
 
     fn parse_arg_mode() -> mode {
@@ -610,10 +603,10 @@ impl parser {
         }
     }
 
-    fn parse_capture_item_or(parse_arg_fn: fn(parser) -> arg_or_capture_item)
+    fn parse_capture_item_or(parse_arg_fn: fn(Parser) -> arg_or_capture_item)
         -> arg_or_capture_item {
 
-        fn parse_capture_item(p:parser, is_move: bool) -> capture_item {
+        fn parse_capture_item(p:Parser, is_move: bool) -> capture_item {
             let sp = mk_sp(p.span.lo, p.span.hi);
             let ident = p.parse_ident();
             @{id: p.get_id(), is_move: is_move, name: ident, span: sp}
@@ -696,23 +689,6 @@ impl parser {
         }
     }
 
-    fn maybe_parse_fixed_vstore() -> Option<Option<uint>> {
-        if self.token == token::BINOP(token::SLASH) {
-            self.bump();
-            match copy self.token {
-              token::UNDERSCORE => {
-                self.bump(); Some(None)
-              }
-              token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => {
-                self.bump(); Some(Some(i as uint))
-              }
-              _ => None
-            }
-        } else {
-            None
-        }
-    }
-
     fn maybe_parse_fixed_vstore_with_star() -> Option<Option<uint>> {
         if self.eat(token::BINOP(token::STAR)) {
             match copy self.token {
@@ -729,7 +705,7 @@ impl parser {
         }
     }
 
-    fn lit_from_token(tok: token::token) -> lit_ {
+    fn lit_from_token(tok: token::Token) -> lit_ {
         match tok {
           token::LIT_INT(i, it) => lit_int(i, it),
           token::LIT_UINT(u, ut) => lit_uint(u, ut),
@@ -761,8 +737,8 @@ impl parser {
     }
 
     fn parse_path_without_tps_(
-        parse_ident: fn(parser) -> ident,
-        parse_last_ident: fn(parser) -> ident) -> @path {
+        parse_ident: fn(Parser) -> ident,
+        parse_last_ident: fn(Parser) -> ident) -> @path {
 
         maybe_whole!(self, nt_path);
         let lo = self.span.lo;
@@ -843,7 +819,7 @@ impl parser {
         }
     }
 
-    fn parse_field(sep: token::token) -> field {
+    fn parse_field(sep: token::Token) -> field {
         let lo = self.span.lo;
         let m = self.parse_mutability();
         let i = self.parse_ident();
@@ -1088,20 +1064,8 @@ impl parser {
             ex = expr_lit(@lit);
         }
 
-        // Vstore is legal following expr_lit(lit_str(...)) and expr_vec(...)
-        // only.
-        match ex {
-          expr_lit(@{node: lit_str(_), span: _}) |
-          expr_vec(_, _)  => match self.maybe_parse_fixed_vstore() {
-            None => (),
-            Some(v) => {
-                hi = self.span.hi;
-                ex = expr_vstore(self.mk_expr(lo, hi, ex),
-                                 expr_vstore_fixed(v));
-            }
-          },
-          _ => ()
-        }
+        let (hi, ex) =
+            self.try_convert_expr_to_obsolete_fixed_length_vstore(lo, hi, ex);
 
         return self.mk_pexpr(lo, hi, ex);
     }
@@ -1221,7 +1185,7 @@ impl parser {
         return e;
     }
 
-    fn parse_sep_and_zerok() -> (Option<token::token>, bool) {
+    fn parse_sep_and_zerok() -> (Option<token::Token>, bool) {
         if self.token == token::BINOP(token::STAR)
             || self.token == token::BINOP(token::PLUS) {
             let zerok = self.token == token::BINOP(token::STAR);
@@ -1244,7 +1208,7 @@ impl parser {
     fn parse_token_tree() -> token_tree {
         maybe_whole!(deref self, nt_tt);
 
-        fn parse_tt_tok(p: parser, delim_ok: bool) -> token_tree {
+        fn parse_tt_tok(p: Parser, delim_ok: bool) -> token_tree {
             match p.token {
               token::RPAREN | token::RBRACE | token::RBRACKET
               if !delim_ok => {
@@ -1311,8 +1275,8 @@ impl parser {
     // This goofy function is necessary to correctly match parens in matchers.
     // Otherwise, `$( ( )` would be a valid matcher, and `$( () )` would be
     // invalid. It's similar to common::parse_seq.
-    fn parse_matcher_subseq(name_idx: @mut uint, bra: token::token,
-                            ket: token::token) -> ~[matcher] {
+    fn parse_matcher_subseq(name_idx: @mut uint, bra: token::Token,
+                            ket: token::Token) -> ~[matcher] {
         let mut ret_val = ~[];
         let mut lparens = 0u;
 
@@ -2159,7 +2123,7 @@ impl parser {
     fn parse_stmt(+first_item_attrs: ~[attribute]) -> @stmt {
         maybe_whole!(self, nt_stmt);
 
-        fn check_expected_item(p: parser, current_attrs: ~[attribute]) {
+        fn check_expected_item(p: Parser, current_attrs: ~[attribute]) {
             // If we have attributes then we should have an item
             if vec::is_not_empty(current_attrs) {
                 p.fatal(~"expected item");
@@ -2185,7 +2149,7 @@ impl parser {
 
             let item_attrs = vec::append(first_item_attrs, item_attrs);
 
-            match self.parse_item_or_view_item(item_attrs, true) {
+            match self.parse_item_or_view_item(item_attrs, true, false) {
               iovi_item(i) => {
                 let mut hi = i.span.hi;
                 let decl = @spanned(lo, hi, decl_item(i));
@@ -2195,6 +2159,9 @@ impl parser {
                 self.span_fatal(vi.span, ~"view items must be declared at \
                                            the top of the block");
               }
+              iovi_foreign_item(_) => {
+                  self.fatal(~"foreign items are not allowed here");
+              }
               iovi_none() => { /* fallthrough */ }
             }
 
@@ -2222,7 +2189,7 @@ impl parser {
 
         maybe_whole!(pair_empty self, nt_block);
 
-        fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) ->
+        fn maybe_parse_inner_attrs_and_next(p: Parser, parse_attrs: bool) ->
             {inner: ~[attribute], next: ~[attribute]} {
             if parse_attrs {
                 p.parse_inner_attrs_and_next()
@@ -2260,7 +2227,7 @@ impl parser {
         let mut stmts = ~[];
         let mut expr = None;
 
-        let {attrs_remaining, view_items, items: items} =
+        let {attrs_remaining, view_items, items: items, _} =
             self.parse_items_and_view_items(first_item_attrs,
                                             IMPORTS_AND_ITEMS_ALLOWED);
 
@@ -2327,19 +2294,20 @@ impl parser {
         return spanned(lo, hi, bloc);
     }
 
+    fn mk_ty_path(i: ident) -> @Ty {
+        @{id: self.get_id(), node: ty_path(
+            ident_to_path(copy self.last_span, i),
+            self.get_id()), span: self.last_span}
+    }
+
     fn parse_optional_ty_param_bounds() -> @~[ty_param_bound] {
         let mut bounds = ~[];
         if self.eat(token::COLON) {
             while is_ident(self.token) {
                 if is_ident(self.token) {
-                    // XXX: temporary until kinds become traits
                     let maybe_bound = match self.token {
                       token::IDENT(copy sid, _) => {
                         match *self.id_to_str(sid) {
-                          ~"Send" => Some(bound_send),
-                          ~"Copy" => Some(bound_copy),
-                          ~"Const" => Some(bound_const),
-                          ~"Owned" => Some(bound_owned),
 
                           ~"send"
                           | ~"copy"
@@ -2349,7 +2317,7 @@ impl parser {
                                           ObsoleteLowerCaseKindBounds);
                             // Bogus value, but doesn't matter, since
                             // is an error
-                            Some(bound_send)
+                            Some(ty_param_bound(self.mk_ty_path(sid)))
                           }
 
                           _ => None
@@ -2364,11 +2332,11 @@ impl parser {
                             bounds.push(bound);
                         }
                         None => {
-                            bounds.push(bound_trait(self.parse_ty(false)));
+                            bounds.push(ty_param_bound(self.parse_ty(false)));
                         }
                     }
                 } else {
-                    bounds.push(bound_trait(self.parse_ty(false)));
+                    bounds.push(ty_param_bound(self.parse_ty(false)));
                 }
             }
         }
@@ -2387,7 +2355,7 @@ impl parser {
         } else { ~[] }
     }
 
-    fn parse_fn_decl(parse_arg_fn: fn(parser) -> arg_or_capture_item)
+    fn parse_fn_decl(parse_arg_fn: fn(Parser) -> arg_or_capture_item)
         -> (fn_decl, capture_clause) {
 
         let args_or_capture_items: ~[arg_or_capture_item] =
@@ -2414,18 +2382,18 @@ impl parser {
 
     fn expect_self_ident() {
         if !self.is_self_ident() {
-            self.fatal(#fmt("expected `self` but found `%s`",
+            self.fatal(fmt!("expected `self` but found `%s`",
                             token_to_str(self.reader, self.token)));
         }
         self.bump();
     }
 
     fn parse_fn_decl_with_self(parse_arg_fn:
-                                    fn(parser) -> arg_or_capture_item)
+                                    fn(Parser) -> arg_or_capture_item)
                             -> (self_ty, fn_decl, capture_clause) {
 
         fn maybe_parse_self_ty(cnstr: fn(+v: mutability) -> ast::self_ty_,
-                               p: parser) -> ast::self_ty_ {
+                               p: Parser) -> ast::self_ty_ {
             // We need to make sure it isn't a mode or a type
             if p.token_is_keyword(~"self", p.look_ahead(1)) ||
                 ((p.token_is_keyword(~"const", p.look_ahead(1)) ||
@@ -2605,7 +2573,7 @@ impl parser {
     // Parses four variants (with the region/type params always optional):
     //    impl<T> ~[T] : to_str { ... }
     fn parse_item_impl() -> item_info {
-        fn wrap_path(p: parser, pt: @path) -> @ty {
+        fn wrap_path(p: Parser, pt: @path) -> @Ty {
             @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span}
         }
 
@@ -2665,7 +2633,7 @@ impl parser {
           ref_id: self.get_id(), impl_id: self.get_id()}
     }
 
-    fn parse_trait_ref_list(ket: token::token) -> ~[@trait_ref] {
+    fn parse_trait_ref_list(ket: token::Token) -> ~[@trait_ref] {
         self.parse_seq_to_before_end(
             ket, seq_sep_trailing_disallowed(token::COMMA),
             |p| p.parse_trait_ref())
@@ -2683,34 +2651,17 @@ impl parser {
 
         let mut fields: ~[@struct_field];
         let mut methods: ~[@method] = ~[];
-        let mut the_ctor: Option<(fn_decl, ~[attribute], blk, codemap::span)>
-            = None;
         let mut the_dtor: Option<(blk, ~[attribute], codemap::span)> = None;
-        let ctor_id = self.get_id();
 
         if self.eat(token::LBRACE) {
             // It's a record-like struct.
             fields = ~[];
             while self.token != token::RBRACE {
                 match self.parse_class_item() {
-                  ctor_decl(a_fn_decl, attrs, blk, s) => {
-                      match the_ctor {
-                        Some((_, _, _, s_first)) => {
-                          self.span_note(s, #fmt("Duplicate constructor \
-                                     declaration for class %s",
-                                     *self.interner.get(class_name)));
-                           self.span_fatal(copy s_first, ~"First constructor \
-                                                          declared here");
-                        }
-                        None    => {
-                          the_ctor = Some((a_fn_decl, attrs, blk, s));
-                        }
-                      }
-                  }
                   dtor_decl(blk, attrs, s) => {
                       match the_dtor {
                         Some((_, _, s_first)) => {
-                          self.span_note(s, #fmt("Duplicate destructor \
+                          self.span_note(s, fmt!("Duplicate destructor \
                                      declaration for class %s",
                                      *self.interner.get(class_name)));
                           self.span_fatal(copy s_first, ~"First destructor \
@@ -2764,39 +2715,17 @@ impl parser {
                     self_id: self.get_id(),
                     body: d_body},
              span: d_s}};
-        match the_ctor {
-          Some((ct_d, ct_attrs, ct_b, ct_s)) => {
-            (class_name,
-             item_class(@{
-                traits: traits,
-                fields: move fields,
-                methods: move methods,
-                ctor: Some({
-                 node: {id: ctor_id,
-                        attrs: ct_attrs,
-                        self_id: self.get_id(),
-                        dec: ct_d,
-                        body: ct_b},
-                 span: ct_s}),
-                dtor: actual_dtor
-             }, ty_params),
-             None)
-          }
-          None => {
-            (class_name,
-             item_class(@{
-                    traits: traits,
-                    fields: move fields,
-                    methods: move methods,
-                    ctor: None,
-                    dtor: actual_dtor
-             }, ty_params),
-             None)
-          }
-        }
+        (class_name,
+         item_class(@{
+             traits: traits,
+             fields: move fields,
+             methods: move methods,
+             dtor: actual_dtor
+         }, ty_params),
+         None)
     }
 
-    fn token_is_pound_or_doc_comment(++tok: token::token) -> bool {
+    fn token_is_pound_or_doc_comment(++tok: token::Token) -> bool {
         match tok {
             token::POUND | token::DOC_COMMENT(_) => true,
             _ => false
@@ -2881,10 +2810,10 @@ impl parser {
         self.eat_keyword(~"static")
     }
 
-    fn parse_mod_items(term: token::token,
+    fn parse_mod_items(term: token::Token,
                        +first_item_attrs: ~[attribute]) -> _mod {
         // Shouldn't be any view items since we've already parsed an item attr
-        let {attrs_remaining, view_items, items: starting_items} =
+        let {attrs_remaining, view_items, items: starting_items, _} =
             self.parse_items_and_view_items(first_item_attrs,
                                             VIEW_ITEMS_AND_ITEMS_ALLOWED);
         let mut items: ~[@item] = move starting_items;
@@ -2898,7 +2827,7 @@ impl parser {
             }
             debug!("parse_mod_items: parse_item_or_view_item(attrs=%?)",
                    attrs);
-            match self.parse_item_or_view_item(attrs, true) {
+            match self.parse_item_or_view_item(attrs, true, false) {
               iovi_item(item) => items.push(item),
               iovi_view_item(view_item) => {
                 self.span_fatal(view_item.span, ~"view items must be \
@@ -2998,11 +2927,11 @@ impl parser {
                                +first_item_attrs: ~[attribute]) ->
         foreign_mod {
         // Shouldn't be any view items since we've already parsed an item attr
-        let {attrs_remaining, view_items, items: _} =
+        let {attrs_remaining, view_items, items: _, foreign_items} =
             self.parse_items_and_view_items(first_item_attrs,
-                                            VIEW_ITEMS_ALLOWED);
+                                        VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED);
 
-        let mut items: ~[@foreign_item] = ~[];
+        let mut items: ~[@foreign_item] = move foreign_items;
         let mut initial_attrs = attrs_remaining;
         while self.token != token::RBRACE {
             let attrs = vec::append(initial_attrs,
@@ -3011,7 +2940,7 @@ impl parser {
             items.push(self.parse_foreign_item(attrs));
         }
         return {sort: sort, view_items: view_items,
-             items: items};
+            items: items};
     }
 
     fn parse_item_foreign_mod(lo: uint,
@@ -3097,12 +3026,6 @@ impl parser {
         let mut methods: ~[@method] = ~[];
         while self.token != token::RBRACE {
             match self.parse_class_item() {
-                ctor_decl(*) => {
-                    self.span_fatal(copy self.span,
-                                    ~"deprecated explicit \
-                                      constructors are not allowed \
-                                      here");
-                }
                 dtor_decl(blk, attrs, s) => {
                     match the_dtor {
                         Some((_, _, s_first)) => {
@@ -3143,7 +3066,6 @@ impl parser {
             traits: ~[],
             fields: move fields,
             methods: move methods,
-            ctor: None,
             dtor: actual_dtor
         };
     }
@@ -3269,15 +3191,18 @@ impl parser {
         }
     }
 
-    fn fn_expr_lookahead(tok: token::token) -> bool {
+    fn fn_expr_lookahead(tok: token::Token) -> bool {
         match tok {
           token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true,
           _ => false
         }
     }
 
-    fn parse_item_or_view_item(+attrs: ~[attribute], items_allowed: bool)
+    fn parse_item_or_view_item(+attrs: ~[attribute], items_allowed: bool,
+                               foreign_items_allowed: bool)
                             -> item_or_view_item {
+        assert items_allowed != foreign_items_allowed;
+
         maybe_whole!(iovi self,nt_item);
         let lo = self.span.lo;
 
@@ -3295,6 +3220,9 @@ impl parser {
             return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_,
                                           visibility,
                                           maybe_append(attrs, extra_attrs)));
+        } else if foreign_items_allowed && self.is_keyword(~"const") {
+            let item = self.parse_item_foreign_const(visibility, attrs);
+            return iovi_foreign_item(item);
         } else if items_allowed &&
             self.is_keyword(~"fn") &&
             !self.fn_expr_lookahead(self.look_ahead(1u)) {
@@ -3309,6 +3237,10 @@ impl parser {
             return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_,
                                           visibility,
                                           maybe_append(attrs, extra_attrs)));
+        } else if foreign_items_allowed &&
+            (self.is_keyword(~"fn") || self.is_keyword(~"pure")) {
+                let item = self.parse_item_foreign_fn(visibility, attrs);
+                return iovi_foreign_item(item);
         } else if items_allowed && self.is_keyword(~"unsafe")
             && self.look_ahead(1u) != token::LBRACE {
             self.bump();
@@ -3395,16 +3327,24 @@ impl parser {
             return iovi_item(self.mk_item(lo, self.last_span.hi, id, item_,
                                           visibility, attrs));
         } else {
+            if visibility != inherited {
+                let mut s = ~"unmatched visibility `";
+                s += if visibility == public { ~"pub" } else { ~"priv" };
+                s += ~"`";
+                self.span_fatal(copy self.last_span, s);
+            }
             return iovi_none;
         };
     }
 
     fn parse_item(+attrs: ~[attribute]) -> Option<@ast::item> {
-        match self.parse_item_or_view_item(attrs, true) {
+        match self.parse_item_or_view_item(attrs, true, false) {
             iovi_none =>
                 None,
             iovi_view_item(_) =>
                 self.fatal(~"view items are not allowed here"),
+            iovi_foreign_item(_) =>
+                self.fatal(~"foreign items are not allowed here"),
             iovi_item(item) =>
                 Some(item)
         }
@@ -3539,28 +3479,35 @@ impl parser {
                                   mode: view_item_parse_mode)
                                -> {attrs_remaining: ~[attribute],
                                    view_items: ~[@view_item],
-                                   items: ~[@item]} {
+                                   items: ~[@item],
+                                   foreign_items: ~[@foreign_item]} {
         let mut attrs = vec::append(first_item_attrs,
                                     self.parse_outer_attributes());
 
-        let items_allowed;
-        match mode {
-            VIEW_ITEMS_AND_ITEMS_ALLOWED | IMPORTS_AND_ITEMS_ALLOWED =>
-                items_allowed = true,
-            VIEW_ITEMS_ALLOWED =>
-                items_allowed = false
-        }
+        let items_allowed = match mode {
+            VIEW_ITEMS_AND_ITEMS_ALLOWED | IMPORTS_AND_ITEMS_ALLOWED => true,
+            VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED => false
+        };
 
-        let (view_items, items) = (DVec(), DVec());
+        let restricted_to_imports = match mode {
+            IMPORTS_AND_ITEMS_ALLOWED => true,
+            VIEW_ITEMS_AND_ITEMS_ALLOWED |
+            VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED => false
+        };
+
+        let foreign_items_allowed = match mode {
+            VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED => true,
+            VIEW_ITEMS_AND_ITEMS_ALLOWED | IMPORTS_AND_ITEMS_ALLOWED => false
+        };
+
+        let (view_items, items, foreign_items) = (DVec(), DVec(), DVec());
         loop {
-            match self.parse_item_or_view_item(attrs, items_allowed) {
+            match self.parse_item_or_view_item(attrs, items_allowed,
+                                               foreign_items_allowed) {
                 iovi_none =>
                     break,
                 iovi_view_item(view_item) => {
-                    match mode {
-                        VIEW_ITEMS_AND_ITEMS_ALLOWED |
-                        VIEW_ITEMS_ALLOWED => {}
-                        IMPORTS_AND_ITEMS_ALLOWED =>
+                    if restricted_to_imports {
                             match view_item.node {
                                 view_item_import(_) => {}
                                 view_item_export(_) | view_item_use(*) =>
@@ -3575,13 +3522,18 @@ impl parser {
                     assert items_allowed;
                     items.push(item)
                 }
+                iovi_foreign_item(foreign_item) => {
+                    assert foreign_items_allowed;
+                    foreign_items.push(foreign_item);
+                }
             }
             attrs = self.parse_outer_attributes();
         }
 
         {attrs_remaining: attrs,
          view_items: dvec::unwrap(move view_items),
-         items: dvec::unwrap(move items)}
+         items: dvec::unwrap(move items),
+         foreign_items: dvec::unwrap(move foreign_items)}
     }
 
     // Parses a source module as a crate
@@ -3655,7 +3607,7 @@ impl parser {
         return self.fatal(~"expected crate directive");
     }
 
-    fn parse_crate_directives(term: token::token,
+    fn parse_crate_directives(term: token::Token,
                               first_outer_attr: ~[attribute]) ->
         ~[@crate_directive] {
 
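
Besides the renames, the removal of explicit class constructors, and the new foreign-item plumbing, the parser.rs hunks show the four-slot lookahead buffer switching from the old `[...]/4` fixed-vector notation to `[... * 4]` with a `[.., ..4]` repeat initializer. look_ahead(distance) fills that ring from the reader and bump() drains it before asking the reader again. A rough present-day Rust sketch of the same ring; the names and the toy Token are illustrative, the real slots hold {tok, span} pairs, and in this sketch the ring distinguishes at most three buffered tokens:

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Token { Ident(u32), Eof }

    struct Parser {
        source: Vec<Token>,   // stands in for the lexer/reader
        next_src: usize,
        token: Token,         // current token
        buffer: [Token; 4],   // lookahead ring
        buffer_start: usize,
        buffer_end: usize,
    }

    impl Parser {
        fn next_from_reader(&mut self) -> Token {
            let t = *self.source.get(self.next_src).unwrap_or(&Token::Eof);
            self.next_src += 1;
            t
        }

        fn buffer_length(&self) -> usize {
            (self.buffer_end + 4 - self.buffer_start) % 4
        }

        /// Peek `distance` tokens past the current one without consuming them.
        fn look_ahead(&mut self, distance: usize) -> Token {
            while self.buffer_length() < distance {
                let t = self.next_from_reader();
                self.buffer[self.buffer_end] = t;
                self.buffer_end = (self.buffer_end + 1) % 4;
            }
            self.buffer[(self.buffer_start + distance - 1) % 4]
        }

        /// Advance: take from the ring if it is non-empty, else from the reader.
        fn bump(&mut self) {
            self.token = if self.buffer_start == self.buffer_end {
                self.next_from_reader()
            } else {
                let t = self.buffer[self.buffer_start];
                self.buffer_start = (self.buffer_start + 1) % 4;
                t
            };
        }
    }

    fn main() {
        let mut p = Parser {
            source: vec![Token::Ident(1), Token::Ident(2), Token::Ident(3)],
            next_src: 0,
            token: Token::Ident(0),
            buffer: [Token::Eof; 4],
            buffer_start: 0,
            buffer_end: 0,
        };
        assert_eq!(p.look_ahead(1), Token::Ident(1)); // buffered, not consumed
        assert_eq!(p.look_ahead(2), Token::Ident(2));
        p.bump();
        assert_eq!(p.token, Token::Ident(1)); // bump drains the ring first
    }
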
diff --git a/src/libsyntax/parse/prec.rs b/src/libsyntax/parse/prec.rs
index 668301db620..3fd905cb8ec 100644
--- a/src/libsyntax/parse/prec.rs
+++ b/src/libsyntax/parse/prec.rs
@@ -3,7 +3,7 @@ export unop_prec;
 export token_to_binop;
 
 use token::*;
-use token::token;
+use token::Token;
 use ast::*;
 
 /// Unary operators have higher precedence than binary
@@ -19,7 +19,7 @@ const as_prec: uint = 11u;
  * Maps a token to a record specifying the corresponding binary
  * operator and its precedence
  */
-fn token_to_binop(tok: token) -> Option<ast::binop> {
+fn token_to_binop(tok: Token) -> Option<ast::binop> {
   match tok {
       BINOP(STAR)    => Some(mul),
       BINOP(SLASH)   => Some(div),
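
prec.rs only renames the token type, but for context: token_to_binop is the table that maps an operator token to the corresponding AST binary operator, which callers pair with precedence lookups such as operator_prec and as_prec. A trivial sketch covering just the two arms visible in the hunk, with stand-in enums:

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum BinopTok { Star, Slash }

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Token { Binop(BinopTok), Semi }

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum AstBinop { Mul, Div }

    fn token_to_binop(tok: Token) -> Option<AstBinop> {
        match tok {
            Token::Binop(BinopTok::Star) => Some(AstBinop::Mul),
            Token::Binop(BinopTok::Slash) => Some(AstBinop::Div),
            _ => None, // the real table covers every binary operator
        }
    }

    fn main() {
        assert_eq!(token_to_binop(Token::Binop(BinopTok::Star)), Some(AstBinop::Mul));
        assert_eq!(token_to_binop(Token::Semi), None);
    }
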
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 99b789cf63f..5151fd1bac8 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -1,18 +1,9 @@
 use util::interner;
-use util::interner::interner;
+use util::interner::Interner;
 use std::map::HashMap;
-use std::serialization::{Serializer,
-                            Deserializer,
-                            serialize_uint,
-                            deserialize_uint,
-                            serialize_i64,
-                            deserialize_i64,
-                            serialize_u64,
-                            deserialize_u64,
-                            serialize_bool,
-                            deserialize_bool};
 
 #[auto_serialize]
+#[auto_deserialize]
 enum binop {
     PLUS,
     MINUS,
@@ -27,7 +18,8 @@ enum binop {
 }
 
 #[auto_serialize]
-enum token {
+#[auto_deserialize]
+enum Token {
     /* Expression-operator symbols. */
     EQ,
     LT,
@@ -84,6 +76,7 @@ enum token {
 }
 
 #[auto_serialize]
+#[auto_deserialize]
 /// For interpolation during macro expansion.
 enum nonterminal {
     nt_item(@ast::item),
@@ -91,7 +84,7 @@ enum nonterminal {
     nt_stmt(@ast::stmt),
     nt_pat( @ast::pat),
     nt_expr(@ast::expr),
-    nt_ty(  @ast::ty),
+    nt_ty(  @ast::Ty),
     nt_ident(ast::ident, bool),
     nt_path(@ast::path),
     nt_tt(  @ast::token_tree), //needs @ed to break a circularity
@@ -113,7 +106,7 @@ fn binop_to_str(o: binop) -> ~str {
     }
 }
 
-fn to_str(in: @ident_interner, t: token) -> ~str {
+fn to_str(in: @ident_interner, t: Token) -> ~str {
     match t {
       EQ => ~"=",
       LT => ~"<",
@@ -199,7 +192,7 @@ fn to_str(in: @ident_interner, t: token) -> ~str {
     }
 }
 
-pure fn can_begin_expr(t: token) -> bool {
+pure fn can_begin_expr(t: Token) -> bool {
     match t {
       LPAREN => true,
       LBRACE => true,
@@ -230,7 +223,7 @@ pure fn can_begin_expr(t: token) -> bool {
 }
 
 /// what's the opposite delimiter?
-fn flip_delimiter(t: token::token) -> token::token {
+fn flip_delimiter(t: token::Token) -> token::Token {
     match t {
       token::LPAREN => token::RPAREN,
       token::LBRACE => token::RBRACE,
@@ -244,7 +237,7 @@ fn flip_delimiter(t: token::token) -> token::token {
 
 
 
-fn is_lit(t: token) -> bool {
+fn is_lit(t: Token) -> bool {
     match t {
       LIT_INT(_, _) => true,
       LIT_UINT(_, _) => true,
@@ -255,22 +248,22 @@ fn is_lit(t: token) -> bool {
     }
 }
 
-pure fn is_ident(t: token) -> bool {
+pure fn is_ident(t: Token) -> bool {
     match t { IDENT(_, _) => true, _ => false }
 }
 
-pure fn is_ident_or_path(t: token) -> bool {
+pure fn is_ident_or_path(t: Token) -> bool {
     match t {
       IDENT(_, _) | INTERPOLATED(nt_path(*)) => true,
       _ => false
     }
 }
 
-pure fn is_plain_ident(t: token) -> bool {
+pure fn is_plain_ident(t: Token) -> bool {
     match t { IDENT(_, false) => true, _ => false }
 }
 
-pure fn is_bar(t: token) -> bool {
+pure fn is_bar(t: Token) -> bool {
     match t { BINOP(OR) | OROR => true, _ => false }
 }
 
@@ -321,7 +314,7 @@ mod special_idents {
 }
 
 struct ident_interner {
-    priv interner: util::interner::interner<@~str>,
+    priv interner: util::interner::Interner<@~str>,
 }
 
 impl ident_interner {
@@ -350,28 +343,33 @@ macro_rules! interner_key (
 )
 
 fn mk_ident_interner() -> @ident_interner {
-    /* the indices here must correspond to the numbers in special_idents */
-    let init_vec = ~[@~"_", @~"anon", @~"drop", @~"", @~"unary", @~"!",
-                     @~"[]", @~"unary-", @~"__extensions__", @~"self",
-                     @~"item", @~"block", @~"stmt", @~"pat", @~"expr",
-                     @~"ty", @~"ident", @~"path", @~"tt", @~"matchers",
-                     @~"str", @~"TyVisitor", @~"arg", @~"descrim",
-                     @~"__rust_abi", @~"__rust_stack_shim", @~"TyDesc",
-                     @~"dtor", @~"main", @~"<opaque>", @~"blk", @~"static",
-                     @~"intrinsic", @~"__foreign_mod__"];
-
-    let rv = @ident_interner {
-        interner: interner::mk_prefill::<@~str>(init_vec)
-    };
-
-    /* having multiple interners will just confuse the serializer */
     unsafe {
-        assert task::local_data::local_data_get(interner_key!()).is_none()
-    };
-    unsafe {
-        task::local_data::local_data_set(interner_key!(), @rv)
-    };
-    rv
+        match task::local_data::local_data_get(interner_key!()) {
+            Some(interner) => *interner,
+            None => {
+                // the indices here must correspond to the numbers in
+                // special_idents.
+                let init_vec = ~[
+                    @~"_", @~"anon", @~"drop", @~"", @~"unary", @~"!",
+                    @~"[]", @~"unary-", @~"__extensions__", @~"self",
+                    @~"item", @~"block", @~"stmt", @~"pat", @~"expr",
+                    @~"ty", @~"ident", @~"path", @~"tt", @~"matchers",
+                    @~"str", @~"TyVisitor", @~"arg", @~"descrim",
+                    @~"__rust_abi", @~"__rust_stack_shim", @~"TyDesc",
+                    @~"dtor", @~"main", @~"<opaque>", @~"blk", @~"static",
+                    @~"intrinsic", @~"__foreign_mod__"
+                ];
+
+                let rv = @ident_interner {
+                    interner: interner::mk_prefill(init_vec)
+                };
+
+                task::local_data::local_data_set(interner_key!(), @rv);
+
+                rv
+            }
+        }
+    }
 }
 
 /* for when we don't care about the contents; doesn't interact with TLD or
@@ -459,8 +457,8 @@ impl binop : cmp::Eq {
     pure fn ne(other: &binop) -> bool { !self.eq(other) }
 }
 
-impl token : cmp::Eq {
-    pure fn eq(other: &token) -> bool {
+impl Token : cmp::Eq {
+    pure fn eq(other: &Token) -> bool {
         match self {
             EQ => {
                 match (*other) {
@@ -722,7 +720,7 @@ impl token : cmp::Eq {
             }
         }
     }
-    pure fn ne(other: &token) -> bool { !self.eq(other) }
+    pure fn ne(other: &Token) -> bool { !self.eq(other) }
 }
 
 // Local Variables:
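
The last token.rs hunk changes mk_ident_interner from asserting that no interner is registered in task-local data (and always building a fresh one) to memoizing: return the stored interner if one exists, otherwise build the prefilled interner, stash it, and return it. A present-day Rust sketch of that memoize-in-thread-local pattern; the toy Interner below is not the real util::interner::Interner, and thread_local! stands in for the old task-local data API:

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    struct Interner {
        names: RefCell<Vec<Rc<str>>>,
        map: RefCell<HashMap<Rc<str>, usize>>,
    }

    impl Interner {
        fn prefill(init: &[&str]) -> Interner {
            let it = Interner { names: RefCell::new(Vec::new()), map: RefCell::new(HashMap::new()) };
            for s in init {
                it.intern(s);
            }
            it
        }
        fn intern(&self, s: &str) -> usize {
            if let Some(&idx) = self.map.borrow().get(s) {
                return idx;
            }
            let rc: Rc<str> = Rc::from(s);
            let mut names = self.names.borrow_mut();
            names.push(rc.clone());
            let idx = names.len() - 1;
            self.map.borrow_mut().insert(rc, idx);
            idx
        }
        fn get(&self, idx: usize) -> Rc<str> {
            self.names.borrow()[idx].clone()
        }
    }

    thread_local! {
        // One interner per thread, created lazily on first access -- the
        // same effect the patch gets by memoizing in task-local data.
        static IDENT_INTERNER: Rc<Interner> = Rc::new(Interner::prefill(&[
            // the indices must line up with the parser's special identifiers
            "_", "anon", "drop", "",
        ]));
    }

    fn mk_ident_interner() -> Rc<Interner> {
        IDENT_INTERNER.with(|i| i.clone())
    }

    fn main() {
        let a = mk_ident_interner();
        let b = mk_ident_interner();
        assert!(Rc::ptr_eq(&a, &b)); // repeated calls share one interner
        assert_eq!(&*a.get(2), "drop"); // prefilled indices line up
    }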