about summary refs log tree commit diff
path: root/src/libsyntax/parse
diff options
context:
space:
mode:
author: Patrick Walton <pcwalton@mimiga.net> 2012-10-15 14:56:42 -0700
committer: Patrick Walton <pcwalton@mimiga.net> 2012-10-15 15:35:36 -0700
commit: 91ae5412d8141ea958924408bf3c1def5edca806 (patch)
tree: 65e295ba7ac7159ea3f7fe172e1a241114ef5e2d /src/libsyntax/parse
parent: c5b82a65e96cfe77e4983e78a34a7d5aa91329b4 (diff)
download: rust-91ae5412d8141ea958924408bf3c1def5edca806.tar.gz
download: rust-91ae5412d8141ea958924408bf3c1def5edca806.zip
rustc: Merge module and type namespaces. r=brson
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--src/libsyntax/parse/attr.rs2
-rw-r--r--src/libsyntax/parse/common.rs94
-rw-r--r--src/libsyntax/parse/eval.rs2
-rw-r--r--src/libsyntax/parse/lexer.rs26
-rw-r--r--src/libsyntax/parse/obsolete.rs6
-rw-r--r--src/libsyntax/parse/parser.rs70
-rw-r--r--src/libsyntax/parse/prec.rs4
-rw-r--r--src/libsyntax/parse/token.rs30
8 files changed, 117 insertions, 117 deletions
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 9be4909814b..42101a431d6 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -23,7 +23,7 @@ trait parser_attr {
     fn parse_optional_meta() -> ~[@ast::meta_item];
 }
 
-impl parser: parser_attr {
+impl Parser: parser_attr {
 
     fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
         -> attr_or_ext
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index c8c30ee7fa9..50c22c08f4f 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -1,63 +1,63 @@
 use std::map::{HashMap};
 use ast_util::spanned;
-use parser::parser;
+use parser::Parser;
 use lexer::reader;
 
 type seq_sep = {
-    sep: Option<token::token>,
+    sep: Option<token::Token>,
     trailing_sep_allowed: bool
 };
 
-fn seq_sep_trailing_disallowed(t: token::token) -> seq_sep {
+fn seq_sep_trailing_disallowed(t: token::Token) -> seq_sep {
     return {sep: option::Some(t), trailing_sep_allowed: false};
 }
-fn seq_sep_trailing_allowed(t: token::token) -> seq_sep {
+fn seq_sep_trailing_allowed(t: token::Token) -> seq_sep {
     return {sep: option::Some(t), trailing_sep_allowed: true};
 }
 fn seq_sep_none() -> seq_sep {
     return {sep: option::None, trailing_sep_allowed: false};
 }
 
-fn token_to_str(reader: reader, ++token: token::token) -> ~str {
+fn token_to_str(reader: reader, ++token: token::Token) -> ~str {
     token::to_str(reader.interner(), token)
 }
 
 trait parser_common {
-    fn unexpected_last(t: token::token) -> !;
+    fn unexpected_last(t: token::Token) -> !;
     fn unexpected() -> !;
-    fn expect(t: token::token);
+    fn expect(t: token::Token);
     fn parse_ident() -> ast::ident;
     fn parse_path_list_ident() -> ast::path_list_ident;
     fn parse_value_ident() -> ast::ident;
-    fn eat(tok: token::token) -> bool;
+    fn eat(tok: token::Token) -> bool;
     // A sanity check that the word we are asking for is a known keyword
     fn require_keyword(word: ~str);
-    fn token_is_keyword(word: ~str, ++tok: token::token) -> bool;
+    fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool;
     fn is_keyword(word: ~str) -> bool;
-    fn is_any_keyword(tok: token::token) -> bool;
+    fn is_any_keyword(tok: token::Token) -> bool;
     fn eat_keyword(word: ~str) -> bool;
     fn expect_keyword(word: ~str);
     fn expect_gt();
-    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::token>,
-                                       f: fn(parser) -> T) -> ~[T];
-    fn parse_seq_to_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> ~[T];
-    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> spanned<~[T]>;
-    fn parse_seq_to_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                 f: fn(parser) -> T) -> ~[T];
-    fn parse_seq_to_before_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                        f: fn(parser) -> T) -> ~[T];
-    fn parse_unspanned_seq<T: Copy>(bra: token::token,
-                                    ket: token::token,
+    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::Token>,
+                                       f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq_to_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> spanned<~[T]>;
+    fn parse_seq_to_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                 f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq_to_before_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                        f: fn(Parser) -> T) -> ~[T];
+    fn parse_unspanned_seq<T: Copy>(bra: token::Token,
+                                    ket: token::Token,
                                     sep: seq_sep,
-                                    f: fn(parser) -> T) -> ~[T];
-    fn parse_seq<T: Copy>(bra: token::token, ket: token::token, sep: seq_sep,
-                          f: fn(parser) -> T) -> spanned<~[T]>;
+                                    f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq<T: Copy>(bra: token::Token, ket: token::Token, sep: seq_sep,
+                          f: fn(Parser) -> T) -> spanned<~[T]>;
 }
 
-impl parser: parser_common {
-    fn unexpected_last(t: token::token) -> ! {
+impl Parser: parser_common {
+    fn unexpected_last(t: token::Token) -> ! {
         self.span_fatal(
             copy self.last_span,
             ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`");
@@ -68,7 +68,7 @@ impl parser: parser_common {
                    + token_to_str(self.reader, self.token) + ~"`");
     }
 
-    fn expect(t: token::token) {
+    fn expect(t: token::Token) {
         if self.token == t {
             self.bump();
         } else {
@@ -104,7 +104,7 @@ impl parser: parser_common {
         return self.parse_ident();
     }
 
-    fn eat(tok: token::token) -> bool {
+    fn eat(tok: token::Token) -> bool {
         return if self.token == tok { self.bump(); true } else { false };
     }
 
@@ -117,14 +117,14 @@ impl parser: parser_common {
         }
     }
 
-    fn token_is_word(word: ~str, ++tok: token::token) -> bool {
+    fn token_is_word(word: ~str, ++tok: token::Token) -> bool {
         match tok {
           token::IDENT(sid, false) => { *self.id_to_str(sid) == word }
           _ => { false }
         }
     }
 
-    fn token_is_keyword(word: ~str, ++tok: token::token) -> bool {
+    fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool {
         self.require_keyword(word);
         self.token_is_word(word, tok)
     }
@@ -133,7 +133,7 @@ impl parser: parser_common {
         self.token_is_keyword(word, self.token)
     }
 
-    fn is_any_keyword(tok: token::token) -> bool {
+    fn is_any_keyword(tok: token::Token) -> bool {
         match tok {
           token::IDENT(sid, false) => {
             self.keywords.contains_key_ref(self.id_to_str(sid))
@@ -216,8 +216,8 @@ impl parser: parser_common {
         }
     }
 
-    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::token>,
-                                       f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::Token>,
+                                       f: fn(Parser) -> T) -> ~[T] {
         let mut first = true;
         let mut v = ~[];
         while self.token != token::GT
@@ -235,16 +235,16 @@ impl parser: parser_common {
         return v;
     }
 
-    fn parse_seq_to_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> ~[T] {
         let v = self.parse_seq_to_before_gt(sep, f);
         self.expect_gt();
 
         return v;
     }
 
-    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> spanned<~[T]> {
+    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> spanned<~[T]> {
         let lo = self.span.lo;
         self.expect(token::LT);
         let result = self.parse_seq_to_before_gt::<T>(sep, f);
@@ -253,16 +253,16 @@ impl parser: parser_common {
         return spanned(lo, hi, result);
     }
 
-    fn parse_seq_to_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                 f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                 f: fn(Parser) -> T) -> ~[T] {
         let val = self.parse_seq_to_before_end(ket, sep, f);
         self.bump();
         return val;
     }
 
 
-    fn parse_seq_to_before_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                        f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_before_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                        f: fn(Parser) -> T) -> ~[T] {
         let mut first: bool = true;
         let mut v: ~[T] = ~[];
         while self.token != ket {
@@ -279,10 +279,10 @@ impl parser: parser_common {
         return v;
     }
 
-    fn parse_unspanned_seq<T: Copy>(bra: token::token,
-                                    ket: token::token,
+    fn parse_unspanned_seq<T: Copy>(bra: token::Token,
+                                    ket: token::Token,
                                     sep: seq_sep,
-                                    f: fn(parser) -> T) -> ~[T] {
+                                    f: fn(Parser) -> T) -> ~[T] {
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
         self.bump();
@@ -291,8 +291,8 @@ impl parser: parser_common {
 
     // NB: Do not use this function unless you actually plan to place the
     // spanned list in the AST.
-    fn parse_seq<T: Copy>(bra: token::token, ket: token::token, sep: seq_sep,
-                          f: fn(parser) -> T) -> spanned<~[T]> {
+    fn parse_seq<T: Copy>(bra: token::Token, ket: token::Token, sep: seq_sep,
+                          f: fn(Parser) -> T) -> spanned<~[T]> {
         let lo = self.span.lo;
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index c9106028491..56c9d4de9f3 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -1,4 +1,4 @@
-use parser::{parser, SOURCE_FILE};
+use parser::{Parser, SOURCE_FILE};
 use attr::parser_attr;
 
 export eval_crate_directives_to_mod;
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 06fcc1cf958..8f57d733eb5 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -10,11 +10,11 @@ export string_reader_as_reader, tt_reader_as_reader;
 
 trait reader {
     fn is_eof() -> bool;
-    fn next_token() -> {tok: token::token, sp: span};
+    fn next_token() -> {tok: token::Token, sp: span};
     fn fatal(~str) -> !;
     fn span_diag() -> span_handler;
     pure fn interner() -> @token::ident_interner;
-    fn peek() -> {tok: token::token, sp: span};
+    fn peek() -> {tok: token::Token, sp: span};
     fn dup() -> reader;
 }
 
@@ -28,7 +28,7 @@ type string_reader = @{
     filemap: codemap::filemap,
     interner: @token::ident_interner,
     /* cached: */
-    mut peek_tok: token::token,
+    mut peek_tok: token::Token,
     mut peek_span: span
 };
 
@@ -69,7 +69,7 @@ fn dup_string_reader(&&r: string_reader) -> string_reader {
 
 impl string_reader: reader {
     fn is_eof() -> bool { is_eof(self) }
-    fn next_token() -> {tok: token::token, sp: span} {
+    fn next_token() -> {tok: token::Token, sp: span} {
         let ret_val = {tok: self.peek_tok, sp: self.peek_span};
         string_advance_token(self);
         return ret_val;
@@ -79,7 +79,7 @@ impl string_reader: reader {
     }
     fn span_diag() -> span_handler { self.span_diagnostic }
     pure fn interner() -> @token::ident_interner { self.interner }
-    fn peek() -> {tok: token::token, sp: span} {
+    fn peek() -> {tok: token::Token, sp: span} {
         {tok: self.peek_tok, sp: self.peek_span}
     }
     fn dup() -> reader { dup_string_reader(self) as reader }
@@ -87,7 +87,7 @@ impl string_reader: reader {
 
 impl tt_reader: reader {
     fn is_eof() -> bool { self.cur_tok == token::EOF }
-    fn next_token() -> {tok: token::token, sp: span} {
+    fn next_token() -> {tok: token::Token, sp: span} {
         /* weird resolve bug: if the following `if`, or any of its
         statements are removed, we get resolution errors */
         if false {
@@ -101,7 +101,7 @@ impl tt_reader: reader {
     }
     fn span_diag() -> span_handler { self.sp_diag }
     pure fn interner() -> @token::ident_interner { self.interner }
-    fn peek() -> {tok: token::token, sp: span} {
+    fn peek() -> {tok: token::Token, sp: span} {
         { tok: self.cur_tok, sp: self.cur_span }
     }
     fn dup() -> reader { dup_tt_reader(self) as reader }
@@ -196,14 +196,14 @@ fn is_bin_digit(c: char) -> bool { return c == '0' || c == '1'; }
 
 // might return a sugared-doc-attr
 fn consume_whitespace_and_comments(rdr: string_reader)
-                                -> Option<{tok: token::token, sp: span}> {
+                                -> Option<{tok: token::Token, sp: span}> {
     while is_whitespace(rdr.curr) { bump(rdr); }
     return consume_any_line_comment(rdr);
 }
 
 // might return a sugared-doc-attr
 fn consume_any_line_comment(rdr: string_reader)
-                                -> Option<{tok: token::token, sp: span}> {
+                                -> Option<{tok: token::Token, sp: span}> {
     if rdr.curr == '/' {
         match nextch(rdr) {
           '/' => {
@@ -246,7 +246,7 @@ fn consume_any_line_comment(rdr: string_reader)
 
 // might return a sugared-doc-attr
 fn consume_block_comment(rdr: string_reader)
-                                -> Option<{tok: token::token, sp: span}> {
+                                -> Option<{tok: token::Token, sp: span}> {
 
     // block comments starting with "/**" or "/*!" are doc-comments
     if rdr.curr == '*' || rdr.curr == '!' {
@@ -317,7 +317,7 @@ fn scan_digits(rdr: string_reader, radix: uint) -> ~str {
     };
 }
 
-fn scan_number(c: char, rdr: string_reader) -> token::token {
+fn scan_number(c: char, rdr: string_reader) -> token::Token {
     let mut num_str, base = 10u, c = c, n = nextch(rdr);
     if c == '0' && n == 'x' {
         bump(rdr);
@@ -435,7 +435,7 @@ fn scan_numeric_escape(rdr: string_reader, n_hex_digits: uint) -> char {
     return accum_int as char;
 }
 
-fn next_token_inner(rdr: string_reader) -> token::token {
+fn next_token_inner(rdr: string_reader) -> token::Token {
     let mut accum_str = ~"";
     let mut c = rdr.curr;
     if (c >= 'a' && c <= 'z')
@@ -460,7 +460,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
     if is_dec_digit(c) {
         return scan_number(c, rdr);
     }
-    fn binop(rdr: string_reader, op: token::binop) -> token::token {
+    fn binop(rdr: string_reader, op: token::binop) -> token::Token {
         bump(rdr);
         if rdr.curr == '=' {
             bump(rdr);
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index 828d498ca3c..c0e01fb1944 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -10,7 +10,7 @@ removed.
 use codemap::span;
 use ast::{expr, expr_lit, lit_nil};
 use ast_util::{respan};
-use token::token;
+use token::Token;
 
 /// The specific types of unsupported syntax
 pub enum ObsoleteSyntax {
@@ -47,7 +47,7 @@ pub trait ObsoleteReporter {
     fn obsolete_expr(sp: span, kind: ObsoleteSyntax) -> @expr;
 }
 
-impl parser : ObsoleteReporter {
+impl Parser : ObsoleteReporter {
     /// Reports an obsolete syntax non-fatal error.
     fn obsolete(sp: span, kind: ObsoleteSyntax) {
         let (kind_str, desc) = match kind {
@@ -121,7 +121,7 @@ impl parser : ObsoleteReporter {
         }
     }
 
-    fn token_is_obsolete_ident(ident: &str, token: token) -> bool {
+    fn token_is_obsolete_ident(ident: &str, token: Token) -> bool {
         match token {
             token::IDENT(copy sid, _) => {
                 str::eq_slice(*self.id_to_str(sid), ident)
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 973822ddff9..e29620a7e79 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -6,7 +6,7 @@ use std::map::HashMap;
 use token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
             INTERPOLATED, special_idents};
 use codemap::{span,fss_none};
-use util::interner::interner;
+use util::interner::Interner;
 use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
 use lexer::reader;
 use prec::{as_prec, token_to_binop};
@@ -58,7 +58,7 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
              stmt_semi, struct_def, struct_field, struct_variant_kind,
              subtract, sty_box, sty_by_ref, sty_region, sty_static, sty_uniq,
              sty_value, token_tree, trait_method, trait_ref, tt_delim, tt_seq,
-             tt_tok, tt_nonterminal, tuple_variant_kind, ty, ty_, ty_bot,
+             tt_tok, tt_nonterminal, tuple_variant_kind, Ty, ty_, ty_bot,
              ty_box, ty_field, ty_fn, ty_infer, ty_mac, ty_method, ty_nil,
              ty_param, ty_param_bound, ty_path, ty_ptr, ty_rec, ty_rptr,
              ty_tup, ty_u32, ty_uniq, ty_vec, ty_fixed_length, type_value_ns,
@@ -71,7 +71,7 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
              expr_vstore_uniq};
 
 export file_type;
-export parser;
+export Parser;
 export CRATE_FILE;
 export SOURCE_FILE;
 
@@ -190,14 +190,14 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>)
 
 /* ident is handled by common.rs */
 
-fn parser(sess: parse_sess, cfg: ast::crate_cfg,
-          +rdr: reader, ftype: file_type) -> parser {
+fn Parser(sess: parse_sess, cfg: ast::crate_cfg,
+          +rdr: reader, ftype: file_type) -> Parser {
 
     let tok0 = rdr.next_token();
     let span0 = tok0.sp;
     let interner = rdr.interner();
 
-    parser {
+    Parser {
         reader: move rdr,
         interner: move interner,
         sess: sess,
@@ -223,14 +223,14 @@ fn parser(sess: parse_sess, cfg: ast::crate_cfg,
     }
 }
 
-struct parser {
+struct Parser {
     sess: parse_sess,
     cfg: crate_cfg,
     file_type: file_type,
-    mut token: token::token,
+    mut token: token::Token,
     mut span: span,
     mut last_span: span,
-    mut buffer: [mut {tok: token::token, sp: span}]/4,
+    mut buffer: [mut {tok: token::Token, sp: span}]/4,
     mut buffer_start: int,
     mut buffer_end: int,
     mut restriction: restriction,
@@ -247,7 +247,7 @@ struct parser {
     drop {} /* do not copy the parser; its state is tied to outside state */
 }
 
-impl parser {
+impl Parser {
     fn bump() {
         self.last_span = self.span;
         let next = if self.buffer_start == self.buffer_end {
@@ -260,7 +260,7 @@ impl parser {
         self.token = next.tok;
         self.span = next.sp;
     }
-    fn swap(next: token::token, lo: uint, hi: uint) {
+    fn swap(next: token::Token, lo: uint, hi: uint) {
         self.token = next;
         self.span = mk_sp(lo, hi);
     }
@@ -270,7 +270,7 @@ impl parser {
         }
         return (4 - self.buffer_start) + self.buffer_end;
     }
-    fn look_ahead(distance: uint) -> token::token {
+    fn look_ahead(distance: uint) -> token::Token {
         let dist = distance as int;
         while self.buffer_length() < dist {
             self.buffer[self.buffer_end] = self.reader.next_token();
@@ -411,7 +411,7 @@ impl parser {
         });
     }
 
-    fn parse_ret_ty() -> (ret_style, @ty) {
+    fn parse_ret_ty() -> (ret_style, @Ty) {
         return if self.eat(token::RARROW) {
             let lo = self.span.lo;
             if self.eat(token::NOT) {
@@ -472,7 +472,7 @@ impl parser {
         self.region_from_name(name)
     }
 
-    fn parse_ty(colons_before_params: bool) -> @ty {
+    fn parse_ty(colons_before_params: bool) -> @Ty {
         maybe_whole!(self, nt_ty);
 
         let lo = self.span.lo;
@@ -609,10 +609,10 @@ impl parser {
         }
     }
 
-    fn parse_capture_item_or(parse_arg_fn: fn(parser) -> arg_or_capture_item)
+    fn parse_capture_item_or(parse_arg_fn: fn(Parser) -> arg_or_capture_item)
         -> arg_or_capture_item {
 
-        fn parse_capture_item(p:parser, is_move: bool) -> capture_item {
+        fn parse_capture_item(p:Parser, is_move: bool) -> capture_item {
             let sp = mk_sp(p.span.lo, p.span.hi);
             let ident = p.parse_ident();
             @{id: p.get_id(), is_move: is_move, name: ident, span: sp}
@@ -728,7 +728,7 @@ impl parser {
         }
     }
 
-    fn lit_from_token(tok: token::token) -> lit_ {
+    fn lit_from_token(tok: token::Token) -> lit_ {
         match tok {
           token::LIT_INT(i, it) => lit_int(i, it),
           token::LIT_UINT(u, ut) => lit_uint(u, ut),
@@ -760,8 +760,8 @@ impl parser {
     }
 
     fn parse_path_without_tps_(
-        parse_ident: fn(parser) -> ident,
-        parse_last_ident: fn(parser) -> ident) -> @path {
+        parse_ident: fn(Parser) -> ident,
+        parse_last_ident: fn(Parser) -> ident) -> @path {
 
         maybe_whole!(self, nt_path);
         let lo = self.span.lo;
@@ -842,7 +842,7 @@ impl parser {
         }
     }
 
-    fn parse_field(sep: token::token) -> field {
+    fn parse_field(sep: token::Token) -> field {
         let lo = self.span.lo;
         let m = self.parse_mutability();
         let i = self.parse_ident();
@@ -1220,7 +1220,7 @@ impl parser {
         return e;
     }
 
-    fn parse_sep_and_zerok() -> (Option<token::token>, bool) {
+    fn parse_sep_and_zerok() -> (Option<token::Token>, bool) {
         if self.token == token::BINOP(token::STAR)
             || self.token == token::BINOP(token::PLUS) {
             let zerok = self.token == token::BINOP(token::STAR);
@@ -1243,7 +1243,7 @@ impl parser {
     fn parse_token_tree() -> token_tree {
         maybe_whole!(deref self, nt_tt);
 
-        fn parse_tt_tok(p: parser, delim_ok: bool) -> token_tree {
+        fn parse_tt_tok(p: Parser, delim_ok: bool) -> token_tree {
             match p.token {
               token::RPAREN | token::RBRACE | token::RBRACKET
               if !delim_ok => {
@@ -1310,8 +1310,8 @@ impl parser {
     // This goofy function is necessary to correctly match parens in matchers.
     // Otherwise, `$( ( )` would be a valid matcher, and `$( () )` would be
     // invalid. It's similar to common::parse_seq.
-    fn parse_matcher_subseq(name_idx: @mut uint, bra: token::token,
-                            ket: token::token) -> ~[matcher] {
+    fn parse_matcher_subseq(name_idx: @mut uint, bra: token::Token,
+                            ket: token::Token) -> ~[matcher] {
         let mut ret_val = ~[];
         let mut lparens = 0u;
 
@@ -2158,7 +2158,7 @@ impl parser {
     fn parse_stmt(+first_item_attrs: ~[attribute]) -> @stmt {
         maybe_whole!(self, nt_stmt);
 
-        fn check_expected_item(p: parser, current_attrs: ~[attribute]) {
+        fn check_expected_item(p: Parser, current_attrs: ~[attribute]) {
             // If we have attributes then we should have an item
             if vec::is_not_empty(current_attrs) {
                 p.fatal(~"expected item");
@@ -2221,7 +2221,7 @@ impl parser {
 
         maybe_whole!(pair_empty self, nt_block);
 
-        fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) ->
+        fn maybe_parse_inner_attrs_and_next(p: Parser, parse_attrs: bool) ->
             {inner: ~[attribute], next: ~[attribute]} {
             if parse_attrs {
                 p.parse_inner_attrs_and_next()
@@ -2386,7 +2386,7 @@ impl parser {
         } else { ~[] }
     }
 
-    fn parse_fn_decl(parse_arg_fn: fn(parser) -> arg_or_capture_item)
+    fn parse_fn_decl(parse_arg_fn: fn(Parser) -> arg_or_capture_item)
         -> (fn_decl, capture_clause) {
 
         let args_or_capture_items: ~[arg_or_capture_item] =
@@ -2420,11 +2420,11 @@ impl parser {
     }
 
     fn parse_fn_decl_with_self(parse_arg_fn:
-                                    fn(parser) -> arg_or_capture_item)
+                                    fn(Parser) -> arg_or_capture_item)
                             -> (self_ty, fn_decl, capture_clause) {
 
         fn maybe_parse_self_ty(cnstr: fn(+v: mutability) -> ast::self_ty_,
-                               p: parser) -> ast::self_ty_ {
+                               p: Parser) -> ast::self_ty_ {
             // We need to make sure it isn't a mode or a type
             if p.token_is_keyword(~"self", p.look_ahead(1)) ||
                 ((p.token_is_keyword(~"const", p.look_ahead(1)) ||
@@ -2604,7 +2604,7 @@ impl parser {
     // Parses four variants (with the region/type params always optional):
     //    impl<T> ~[T] : to_str { ... }
     fn parse_item_impl() -> item_info {
-        fn wrap_path(p: parser, pt: @path) -> @ty {
+        fn wrap_path(p: Parser, pt: @path) -> @Ty {
             @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span}
         }
 
@@ -2664,7 +2664,7 @@ impl parser {
           ref_id: self.get_id(), impl_id: self.get_id()}
     }
 
-    fn parse_trait_ref_list(ket: token::token) -> ~[@trait_ref] {
+    fn parse_trait_ref_list(ket: token::Token) -> ~[@trait_ref] {
         self.parse_seq_to_before_end(
             ket, seq_sep_trailing_disallowed(token::COMMA),
             |p| p.parse_trait_ref())
@@ -2756,7 +2756,7 @@ impl parser {
          None)
     }
 
-    fn token_is_pound_or_doc_comment(++tok: token::token) -> bool {
+    fn token_is_pound_or_doc_comment(++tok: token::Token) -> bool {
         match tok {
             token::POUND | token::DOC_COMMENT(_) => true,
             _ => false
@@ -2841,7 +2841,7 @@ impl parser {
         self.eat_keyword(~"static")
     }
 
-    fn parse_mod_items(term: token::token,
+    fn parse_mod_items(term: token::Token,
                        +first_item_attrs: ~[attribute]) -> _mod {
         // Shouldn't be any view items since we've already parsed an item attr
         let {attrs_remaining, view_items, items: starting_items} =
@@ -3222,7 +3222,7 @@ impl parser {
         }
     }
 
-    fn fn_expr_lookahead(tok: token::token) -> bool {
+    fn fn_expr_lookahead(tok: token::Token) -> bool {
         match tok {
           token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true,
           _ => false
@@ -3608,7 +3608,7 @@ impl parser {
         return self.fatal(~"expected crate directive");
     }
 
-    fn parse_crate_directives(term: token::token,
+    fn parse_crate_directives(term: token::Token,
                               first_outer_attr: ~[attribute]) ->
         ~[@crate_directive] {
 
diff --git a/src/libsyntax/parse/prec.rs b/src/libsyntax/parse/prec.rs
index 668301db620..3fd905cb8ec 100644
--- a/src/libsyntax/parse/prec.rs
+++ b/src/libsyntax/parse/prec.rs
@@ -3,7 +3,7 @@ export unop_prec;
 export token_to_binop;
 
 use token::*;
-use token::token;
+use token::Token;
 use ast::*;
 
 /// Unary operators have higher precedence than binary
@@ -19,7 +19,7 @@ const as_prec: uint = 11u;
  * Maps a token to a record specifying the corresponding binary
  * operator and its precedence
  */
-fn token_to_binop(tok: token) -> Option<ast::binop> {
+fn token_to_binop(tok: Token) -> Option<ast::binop> {
   match tok {
       BINOP(STAR)    => Some(mul),
       BINOP(SLASH)   => Some(div),
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index a7d439b8ce6..5151fd1bac8 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -1,5 +1,5 @@
 use util::interner;
-use util::interner::interner;
+use util::interner::Interner;
 use std::map::HashMap;
 
 #[auto_serialize]
@@ -19,7 +19,7 @@ enum binop {
 
 #[auto_serialize]
 #[auto_deserialize]
-enum token {
+enum Token {
     /* Expression-operator symbols. */
     EQ,
     LT,
@@ -84,7 +84,7 @@ enum nonterminal {
     nt_stmt(@ast::stmt),
     nt_pat( @ast::pat),
     nt_expr(@ast::expr),
-    nt_ty(  @ast::ty),
+    nt_ty(  @ast::Ty),
     nt_ident(ast::ident, bool),
     nt_path(@ast::path),
     nt_tt(  @ast::token_tree), //needs @ed to break a circularity
@@ -106,7 +106,7 @@ fn binop_to_str(o: binop) -> ~str {
     }
 }
 
-fn to_str(in: @ident_interner, t: token) -> ~str {
+fn to_str(in: @ident_interner, t: Token) -> ~str {
     match t {
       EQ => ~"=",
       LT => ~"<",
@@ -192,7 +192,7 @@ fn to_str(in: @ident_interner, t: token) -> ~str {
     }
 }
 
-pure fn can_begin_expr(t: token) -> bool {
+pure fn can_begin_expr(t: Token) -> bool {
     match t {
       LPAREN => true,
       LBRACE => true,
@@ -223,7 +223,7 @@ pure fn can_begin_expr(t: token) -> bool {
 }
 
 /// what's the opposite delimiter?
-fn flip_delimiter(t: token::token) -> token::token {
+fn flip_delimiter(t: token::Token) -> token::Token {
     match t {
       token::LPAREN => token::RPAREN,
       token::LBRACE => token::RBRACE,
@@ -237,7 +237,7 @@ fn flip_delimiter(t: token::token) -> token::token {
 
 
 
-fn is_lit(t: token) -> bool {
+fn is_lit(t: Token) -> bool {
     match t {
       LIT_INT(_, _) => true,
       LIT_UINT(_, _) => true,
@@ -248,22 +248,22 @@ fn is_lit(t: token) -> bool {
     }
 }
 
-pure fn is_ident(t: token) -> bool {
+pure fn is_ident(t: Token) -> bool {
     match t { IDENT(_, _) => true, _ => false }
 }
 
-pure fn is_ident_or_path(t: token) -> bool {
+pure fn is_ident_or_path(t: Token) -> bool {
     match t {
       IDENT(_, _) | INTERPOLATED(nt_path(*)) => true,
       _ => false
     }
 }
 
-pure fn is_plain_ident(t: token) -> bool {
+pure fn is_plain_ident(t: Token) -> bool {
     match t { IDENT(_, false) => true, _ => false }
 }
 
-pure fn is_bar(t: token) -> bool {
+pure fn is_bar(t: Token) -> bool {
     match t { BINOP(OR) | OROR => true, _ => false }
 }
 
@@ -314,7 +314,7 @@ mod special_idents {
 }
 
 struct ident_interner {
-    priv interner: util::interner::interner<@~str>,
+    priv interner: util::interner::Interner<@~str>,
 }
 
 impl ident_interner {
@@ -457,8 +457,8 @@ impl binop : cmp::Eq {
     pure fn ne(other: &binop) -> bool { !self.eq(other) }
 }
 
-impl token : cmp::Eq {
-    pure fn eq(other: &token) -> bool {
+impl Token : cmp::Eq {
+    pure fn eq(other: &Token) -> bool {
         match self {
             EQ => {
                 match (*other) {
@@ -720,7 +720,7 @@ impl token : cmp::Eq {
             }
         }
     }
-    pure fn ne(other: &token) -> bool { !self.eq(other) }
+    pure fn ne(other: &Token) -> bool { !self.eq(other) }
 }
 
 // Local Variables: