author     Patrick Walton <pcwalton@mimiga.net>    2012-10-15 14:56:42 -0700
committer  Patrick Walton <pcwalton@mimiga.net>    2012-10-15 15:35:36 -0700
commit     91ae5412d8141ea958924408bf3c1def5edca806 (patch)
tree       65e295ba7ac7159ea3f7fe172e1a241114ef5e2d
parent     c5b82a65e96cfe77e4983e78a34a7d5aa91329b4 (diff)
rustc: Merge module and type namespaces. r=brson
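
Most of this patch is the mechanical fallout of that merge: once modules and types share a single namespace, a module and a type can no longer both be named `ty`, `token`, `parser`, or `codemap`, so the types are renamed to `Ty`, `Token`, `Parser`, and `CodeMap` (and colliding extern mods such as `c_double` gain a `_utils` suffix). A minimal sketch of the collision, written in modern Rust rather than the 2012 dialect used in this patch; the module and type names are taken from the diff, the rest is illustrative:

    // Illustrative sketch only (modern Rust, not the 2012 dialect below):
    // with modules and types merged into one namespace, importing a type
    // that shares its module's name would clash, hence the CamelCase renames.
    mod token {
        pub struct Token;          // formerly `token::token`
    }

    use crate::token::Token;       // fine: `Token` does not clash with `mod token`
    // use crate::token::token;    // would collide with `mod token` above
                                   // if the type had kept its old name

    fn main() {
        let _tok: Token = Token;
    }
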
-rw-r--r--  src/fuzzer/fuzzer.rs  18
-rw-r--r--  src/libcore/cmath.rs  4
-rw-r--r--  src/libcore/f32.rs  2
-rw-r--r--  src/libcore/f64.rs  4
-rw-r--r--  src/libcore/libc.rs  6
-rw-r--r--  src/libsyntax/ast.rs  50
-rw-r--r--  src/libsyntax/ast_util.rs  2
-rw-r--r--  src/libsyntax/codemap.rs  36
-rw-r--r--  src/libsyntax/diagnostic.rs  19
-rw-r--r--  src/libsyntax/ext/auto_serialize.rs  8
-rw-r--r--  src/libsyntax/ext/base.rs  6
-rw-r--r--  src/libsyntax/ext/pipes.rs  4
-rw-r--r--  src/libsyntax/ext/pipes/ast_builder.rs  60
-rw-r--r--  src/libsyntax/ext/pipes/check.rs  4
-rw-r--r--  src/libsyntax/ext/pipes/parse_proto.rs  2
-rw-r--r--  src/libsyntax/ext/pipes/pipec.rs  8
-rw-r--r--  src/libsyntax/ext/pipes/proto.rs  10
-rw-r--r--  src/libsyntax/ext/qquote.rs  22
-rw-r--r--  src/libsyntax/ext/simplext.rs  4
-rw-r--r--  src/libsyntax/ext/trace_macros.rs  6
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs  12
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs  6
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs  9
-rw-r--r--  src/libsyntax/fold.rs  4
-rw-r--r--  src/libsyntax/parse.rs  24
-rw-r--r--  src/libsyntax/parse/attr.rs  2
-rw-r--r--  src/libsyntax/parse/common.rs  94
-rw-r--r--  src/libsyntax/parse/eval.rs  2
-rw-r--r--  src/libsyntax/parse/lexer.rs  26
-rw-r--r--  src/libsyntax/parse/obsolete.rs  6
-rw-r--r--  src/libsyntax/parse/parser.rs  70
-rw-r--r--  src/libsyntax/parse/prec.rs  4
-rw-r--r--  src/libsyntax/parse/token.rs  30
-rw-r--r--  src/libsyntax/print/pprust.rs  14
-rw-r--r--  src/libsyntax/util/interner.rs  10
-rw-r--r--  src/libsyntax/visit.rs  12
-rw-r--r--  src/rustc/back/link.rs  26
-rw-r--r--  src/rustc/back/rpath.rs  4
-rw-r--r--  src/rustc/driver/driver.rs  29
-rw-r--r--  src/rustc/driver/rustc.rs  2
-rw-r--r--  src/rustc/driver/session.rs  16
-rw-r--r--  src/rustc/front/core_inject.rs  6
-rw-r--r--  src/rustc/front/intrinsic_inject.rs  4
-rw-r--r--  src/rustc/front/test.rs  14
-rw-r--r--  src/rustc/metadata/creader.rs  8
-rw-r--r--  src/rustc/metadata/csearch.rs  20
-rw-r--r--  src/rustc/metadata/cstore.rs  40
-rw-r--r--  src/rustc/metadata/encoder.rs  8
-rw-r--r--  src/rustc/metadata/filesearch.rs  12
-rw-r--r--  src/rustc/metadata/loader.rs  6
-rw-r--r--  src/rustc/metadata/tydecode.rs  2
-rw-r--r--  src/rustc/metadata/tyencode.rs  2
-rw-r--r--  src/rustc/middle/astencode.rs  8
-rw-r--r--  src/rustc/middle/borrowck.rs  7
-rw-r--r--  src/rustc/middle/borrowck/gather_loans.rs  4
-rw-r--r--  src/rustc/middle/borrowck/loan.rs  6
-rw-r--r--  src/rustc/middle/borrowck/preserve.rs  6
-rw-r--r--  src/rustc/middle/capture.rs  1
-rw-r--r--  src/rustc/middle/check_alt.rs  1
-rw-r--r--  src/rustc/middle/check_const.rs  12
-rw-r--r--  src/rustc/middle/check_loop.rs  1
-rw-r--r--  src/rustc/middle/kind.rs  7
-rw-r--r--  src/rustc/middle/lang_items.rs  12
-rw-r--r--  src/rustc/middle/lint.rs  6
-rw-r--r--  src/rustc/middle/liveness.rs  1
-rw-r--r--  src/rustc/middle/mem_categorization.rs  4
-rw-r--r--  src/rustc/middle/region.rs  16
-rw-r--r--  src/rustc/middle/resolve.rs  458
-rw-r--r--  src/rustc/middle/trans/alt.rs  1
-rw-r--r--  src/rustc/middle/trans/base.rs  6
-rw-r--r--  src/rustc/middle/trans/build.rs  1
-rw-r--r--  src/rustc/middle/trans/common.rs  8
-rw-r--r--  src/rustc/middle/trans/debuginfo.rs  8
-rw-r--r--  src/rustc/middle/trans/foreign.rs  2
-rw-r--r--  src/rustc/middle/trans/reachable.rs  2
-rw-r--r--  src/rustc/middle/trans/reflect.rs  1
-rw-r--r--  src/rustc/middle/trans/tvec.rs  1
-rw-r--r--  src/rustc/middle/trans/type_use.rs  1
-rw-r--r--  src/rustc/middle/ty.rs  144
-rw-r--r--  src/rustc/middle/typeck.rs  1
-rw-r--r--  src/rustc/middle/typeck/astconv.rs  6
-rw-r--r--  src/rustc/middle/typeck/check.rs  34
-rw-r--r--  src/rustc/middle/typeck/check/alt.rs  4
-rw-r--r--  src/rustc/middle/typeck/check/method.rs  4
-rw-r--r--  src/rustc/middle/typeck/check/regionck.rs  12
-rw-r--r--  src/rustc/middle/typeck/check/regionmanip.rs  8
-rw-r--r--  src/rustc/middle/typeck/coherence.rs  6
-rw-r--r--  src/rustc/middle/typeck/collect.rs  4
-rw-r--r--  src/rustc/middle/typeck/infer.rs  15
-rw-r--r--  src/rustc/middle/typeck/infer/assignment.rs  4
-rw-r--r--  src/rustc/middle/typeck/infer/combine.rs  18
-rw-r--r--  src/rustc/middle/typeck/infer/glb.rs  6
-rw-r--r--  src/rustc/middle/typeck/infer/integral.rs  2
-rw-r--r--  src/rustc/middle/typeck/infer/lattice.rs  2
-rw-r--r--  src/rustc/middle/typeck/infer/lub.rs  6
-rw-r--r--  src/rustc/middle/typeck/infer/region_var_bindings.rs  58
-rw-r--r--  src/rustc/middle/typeck/infer/resolve.rs  10
-rw-r--r--  src/rustc/middle/typeck/infer/sub.rs  6
-rw-r--r--  src/rustc/middle/typeck/infer/to_str.rs  16
-rw-r--r--  src/rustc/middle/typeck/infer/unify.rs  4
-rw-r--r--  src/rustc/middle/typeck/rscope.rs  36
-rw-r--r--  src/rustc/util/common.rs  1
-rw-r--r--  src/rustc/util/ppaux.rs  13
-rw-r--r--  src/rustdoc/astsrv.rs  14
-rw-r--r--  src/rustdoc/parse.rs  6
-rw-r--r--  src/test/run-pass/issue-2930.rs  4
-rw-r--r--  src/test/run-pass/pipe-select.rs  4
107 files changed, 877 insertions, 951 deletions
diff --git a/src/fuzzer/fuzzer.rs b/src/fuzzer/fuzzer.rs
index 3e31287e3cd..018972d4c3e 100644
--- a/src/fuzzer/fuzzer.rs
+++ b/src/fuzzer/fuzzer.rs
@@ -104,7 +104,7 @@ pure fn safe_to_use_expr(e: ast::expr, tm: test_mode) -> bool {
     }
 }
 
-fn safe_to_steal_ty(t: @ast::ty, tm: test_mode) -> bool {
+fn safe_to_steal_ty(t: @ast::Ty, tm: test_mode) -> bool {
     // Restrictions happen to be the same.
     safe_to_replace_ty(t.node, tm)
 }
@@ -119,16 +119,16 @@ fn stash_expr_if(c: fn@(@ast::expr, test_mode)->bool,
     } else {/* now my indices are wrong :( */ }
 }
 
-fn stash_ty_if(c: fn@(@ast::ty, test_mode)->bool,
-               es: @mut ~[ast::ty],
-               e: @ast::ty,
+fn stash_ty_if(c: fn@(@ast::Ty, test_mode)->bool,
+               es: @mut ~[ast::Ty],
+               e: @ast::Ty,
                tm: test_mode) {
     if c(e, tm) {
         es.push(*e);
     } else {/* now my indices are wrong :( */ }
 }
 
-type stolen_stuff = {exprs: ~[ast::expr], tys: ~[ast::ty]};
+type stolen_stuff = {exprs: ~[ast::expr], tys: ~[ast::Ty]};
 
 fn steal(crate: ast::crate, tm: test_mode) -> stolen_stuff {
     let exprs = @mut ~[];
@@ -195,7 +195,7 @@ fn replace_expr_in_crate(crate: ast::crate, i: uint,
 
 
 // Replace the |i|th ty (in fold order) of |crate| with |newty|.
-fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::ty,
+fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty,
                        tm: test_mode) -> ast::crate {
     let j: @mut uint = @mut 0u;
     fn fold_ty_rep(j_: @mut uint, i_: uint, newty_: ast::ty_,
@@ -225,7 +225,7 @@ fn as_str(f: fn@(+x: io::Writer)) -> ~str {
     io::with_str_writer(f)
 }
 
-fn check_variants_of_ast(crate: ast::crate, codemap: codemap::codemap,
+fn check_variants_of_ast(crate: ast::crate, codemap: codemap::CodeMap,
                          filename: &Path, cx: context) {
     let stolen = steal(crate, cx.mode);
     let extra_exprs = vec::filter(common_exprs(),
@@ -239,7 +239,7 @@ fn check_variants_of_ast(crate: ast::crate, codemap: codemap::codemap,
 
 fn check_variants_T<T: Copy>(
   crate: ast::crate,
-  codemap: codemap::codemap,
+  codemap: codemap::CodeMap,
   filename: &Path,
   thing_label: ~str,
   things: ~[T],
@@ -444,7 +444,7 @@ fn parse_and_print(code: @~str) -> ~str {
 
 fn has_raw_pointers(c: ast::crate) -> bool {
     let has_rp = @mut false;
-    fn visit_ty(flag: @mut bool, t: @ast::ty) {
+    fn visit_ty(flag: @mut bool, t: @ast::Ty) {
         match t.node {
           ast::ty_ptr(_) => { *flag = true; }
           _ => { }
diff --git a/src/libcore/cmath.rs b/src/libcore/cmath.rs
index b0aeb78afaa..46ac90413a6 100644
--- a/src/libcore/cmath.rs
+++ b/src/libcore/cmath.rs
@@ -12,7 +12,7 @@ use libc::c_double;
 
 #[link_name = "m"]
 #[abi = "cdecl"]
-pub extern mod c_double {
+pub extern mod c_double_utils {
 
     // Alpabetically sorted by link_name
 
@@ -87,7 +87,7 @@ pub extern mod c_double {
 
 #[link_name = "m"]
 #[abi = "cdecl"]
-pub extern mod c_float {
+pub extern mod c_float_utils {
 
     // Alpabetically sorted by link_name
 
diff --git a/src/libcore/f32.rs b/src/libcore/f32.rs
index ec0e66734fa..ed6908d110d 100644
--- a/src/libcore/f32.rs
+++ b/src/libcore/f32.rs
@@ -4,7 +4,7 @@
 
 //! Operations and constants for `f32`
 
-pub use cmath::c_float::*;
+pub use cmath::c_float_utils::*;
 pub use cmath::c_float_targ_consts::*;
 
 // These are not defined inside consts:: for consistency with
diff --git a/src/libcore/f64.rs b/src/libcore/f64.rs
index 731d369649b..2d13dc86e2f 100644
--- a/src/libcore/f64.rs
+++ b/src/libcore/f64.rs
@@ -4,7 +4,7 @@
 
 //! Operations and constants for `f64`
 
-pub use cmath::c_double::*;
+pub use cmath::c_double_utils::*;
 pub use cmath::c_double_targ_consts::*;
 
 // FIXME (#1433): obtain these in a different way
@@ -59,7 +59,7 @@ pub pure fn ge(x: f64, y: f64) -> bool { return x >= y; }
 pub pure fn gt(x: f64, y: f64) -> bool { return x > y; }
 
 pub pure fn sqrt(x: f64) -> f64 {
-    cmath::c_double::sqrt(x as libc::c_double) as f64
+    cmath::c_double_utils::sqrt(x as libc::c_double) as f64
 }
 
 /// Returns true if `x` is a positive number, including +0.0f640 and +Infinity
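
The `cmath` and `f64` hunks above show the other side of the same merge: an `extern mod` now shares a namespace with types, so `c_double` and `c_float` are renamed to `c_double_utils` and `c_float_utils` to avoid colliding with libc's type aliases of the same names (the `dirent`/`dirent_t` change in `libc.rs` below is the same fix). A hypothetical sketch of that collision in modern Rust; the `_utils` name is the one the patch introduces, everything else is illustrative:

    // Hypothetical sketch: a module of math bindings cannot share the name
    // of the `c_double` type alias once modules and types are one namespace.
    #[allow(non_camel_case_types)]
    type c_double = f64;            // stands in for libc::c_double

    mod c_double_utils {            // formerly `extern mod c_double`
        pub fn sqrt(x: crate::c_double) -> crate::c_double {
            x.sqrt()
        }
    }

    fn main() {
        // mirrors f64::sqrt forwarding to the renamed bindings module
        println!("{}", c_double_utils::sqrt(2.0));
    }
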
diff --git a/src/libcore/libc.rs b/src/libcore/libc.rs
index dd8f76c89d5..7ed43f619e1 100644
--- a/src/libcore/libc.rs
+++ b/src/libcore/libc.rs
@@ -87,7 +87,7 @@ pub use funcs::extra::*;
 
 pub use size_t;
 pub use c_float, c_double, c_void, FILE, fpos_t;
-pub use DIR, dirent;
+pub use DIR, dirent_t;
 pub use c_char, c_schar, c_uchar;
 pub use c_short, c_ushort, c_int, c_uint, c_long, c_ulong;
 pub use size_t, ptrdiff_t, clock_t, time_t;
@@ -147,7 +147,7 @@ mod types {
         }
         pub mod posix88 {
             pub enum DIR {}
-            pub enum dirent {}
+            pub enum dirent_t {}
         }
         pub mod posix01 {}
         pub mod posix08 {}
@@ -1019,7 +1019,7 @@ pub mod funcs {
         pub extern mod dirent {
             fn opendir(dirname: *c_char) -> *DIR;
             fn closedir(dirp: *DIR) -> c_int;
-            fn readdir(dirp: *DIR) -> *dirent;
+            fn readdir(dirp: *DIR) -> *dirent_t;
             fn rewinddir(dirp: *DIR);
             fn seekdir(dirp: *DIR, loc: c_long);
             fn telldir(dirp: *DIR) -> c_long;
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index cf7b758216b..e3da15d181b 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -129,7 +129,7 @@ type path = {span: span,
              global: bool,
              idents: ~[ident],
              rp: Option<@region>,
-             types: ~[@ty]};
+             types: ~[@Ty]};
 
 type crate_num = int;
 
@@ -156,7 +156,7 @@ enum ty_param_bound {
     bound_send,
     bound_const,
     bound_owned,
-    bound_trait(@ty),
+    bound_trait(@Ty),
 }
 
 #[auto_serialize]
@@ -702,7 +702,7 @@ type initializer = {op: init_op, expr: @expr};
 // a refinement on pat.
 #[auto_serialize]
 #[auto_deserialize]
-type local_ =  {is_mutbl: bool, ty: @ty, pat: @pat,
+type local_ =  {is_mutbl: bool, ty: @Ty, pat: @pat,
                 init: Option<initializer>, id: node_id};
 
 type local = spanned<local_>;
@@ -764,7 +764,7 @@ enum expr_ {
     expr_binary(binop, @expr, @expr),
     expr_unary(unop, @expr),
     expr_lit(@lit),
-    expr_cast(@expr, @ty),
+    expr_cast(@expr, @Ty),
     expr_if(@expr, blk, Option<@expr>),
     expr_while(@expr, blk),
     /* Conditionless loop (can be exited with break, cont, ret, or fail)
@@ -788,7 +788,7 @@ enum expr_ {
     expr_assign(@expr, @expr),
     expr_swap(@expr, @expr),
     expr_assign_op(binop, @expr, @expr),
-    expr_field(@expr, ident, ~[@ty]),
+    expr_field(@expr, ident, ~[@Ty]),
     expr_index(@expr, @expr),
     expr_path(@path),
     expr_addr_of(mutability, @expr),
@@ -843,10 +843,10 @@ type capture_clause = @~[capture_item];
 #[auto_deserialize]
 #[doc="For macro invocations; parsing is delegated to the macro"]
 enum token_tree {
-    tt_tok(span, token::token),
+    tt_tok(span, token::Token),
     tt_delim(~[token_tree]),
     // These only make sense for right-hand-sides of MBE macros
-    tt_seq(span, ~[token_tree], Option<token::token>, bool),
+    tt_seq(span, ~[token_tree], Option<token::Token>, bool),
     tt_nonterminal(span, ident)
 }
 
@@ -908,10 +908,10 @@ type matcher = spanned<matcher_>;
 #[auto_deserialize]
 enum matcher_ {
     // match one token
-    match_tok(token::token),
+    match_tok(token::Token),
     // match repetitions of a sequence: body, separator, zero ok?,
     // lo, hi position-in-match-array used:
-    match_seq(~[matcher], Option<token::token>, bool, uint, uint),
+    match_seq(~[matcher], Option<token::Token>, bool, uint, uint),
     // parse a Rust NT: name to bind, name of NT, position in match array:
     match_nonterminal(ident, ident, uint)
 }
@@ -984,7 +984,7 @@ impl ast::lit_: cmp::Eq {
 // type structure in middle/ty.rs as well.
 #[auto_serialize]
 #[auto_deserialize]
-type mt = {ty: @ty, mutbl: mutability};
+type mt = {ty: @Ty, mutbl: mutability};
 
 #[auto_serialize]
 #[auto_deserialize]
@@ -1087,7 +1087,7 @@ impl float_ty : cmp::Eq {
 
 #[auto_serialize]
 #[auto_deserialize]
-type ty = {id: node_id, node: ty_, span: span};
+type Ty = {id: node_id, node: ty_, span: span};
 
 // Not represented directly in the AST, referred to by name through a ty_path.
 #[auto_serialize]
@@ -1163,9 +1163,9 @@ enum ty_ {
     ty_rptr(@region, mt),
     ty_rec(~[ty_field]),
     ty_fn(proto, purity, @~[ty_param_bound], fn_decl),
-    ty_tup(~[@ty]),
+    ty_tup(~[@Ty]),
     ty_path(@path, node_id),
-    ty_fixed_length(@ty, Option<uint>),
+    ty_fixed_length(@Ty, Option<uint>),
     ty_mac(mac),
     // ty_infer means the type should be inferred instead of it having been
     // specified. This should only appear at the "top level" of a type and not
@@ -1175,16 +1175,16 @@ enum ty_ {
 
 // Equality and byte-iter (hashing) can be quite approximate for AST types.
 // since we only care about this for normalizing them to "real" types.
-impl ty : cmp::Eq {
-    pure fn eq(other: &ty) -> bool {
+impl Ty : cmp::Eq {
+    pure fn eq(other: &Ty) -> bool {
         ptr::addr_of(&self) == ptr::addr_of(&(*other))
     }
-    pure fn ne(other: &ty) -> bool {
+    pure fn ne(other: &Ty) -> bool {
         ptr::addr_of(&self) != ptr::addr_of(&(*other))
     }
 }
 
-impl ty : to_bytes::IterBytes {
+impl Ty : to_bytes::IterBytes {
     pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
         to_bytes::iter_bytes_2(&self.span.lo, &self.span.hi, lsb0, f);
     }
@@ -1193,13 +1193,13 @@ impl ty : to_bytes::IterBytes {
 
 #[auto_serialize]
 #[auto_deserialize]
-type arg = {mode: mode, ty: @ty, ident: ident, id: node_id};
+type arg = {mode: mode, ty: @Ty, ident: ident, id: node_id};
 
 #[auto_serialize]
 #[auto_deserialize]
 type fn_decl =
     {inputs: ~[arg],
-     output: @ty,
+     output: @Ty,
      cf: ret_style};
 
 #[auto_serialize]
@@ -1362,7 +1362,7 @@ type foreign_mod =
 
 #[auto_serialize]
 #[auto_deserialize]
-type variant_arg = {ty: @ty, id: node_id};
+type variant_arg = {ty: @Ty, id: node_id};
 
 #[auto_serialize]
 #[auto_deserialize]
@@ -1495,7 +1495,7 @@ impl visibility : cmp::Eq {
 type struct_field_ = {
     kind: struct_field_kind,
     id: node_id,
-    ty: @ty
+    ty: @Ty
 };
 
 type struct_field = spanned<struct_field_>;
@@ -1531,17 +1531,17 @@ type item = {ident: ident, attrs: ~[attribute],
 #[auto_serialize]
 #[auto_deserialize]
 enum item_ {
-    item_const(@ty, @expr),
+    item_const(@Ty, @expr),
     item_fn(fn_decl, purity, ~[ty_param], blk),
     item_mod(_mod),
     item_foreign_mod(foreign_mod),
-    item_ty(@ty, ~[ty_param]),
+    item_ty(@Ty, ~[ty_param]),
     item_enum(enum_def, ~[ty_param]),
     item_class(@struct_def, ~[ty_param]),
     item_trait(~[ty_param], ~[@trait_ref], ~[trait_method]),
     item_impl(~[ty_param],
               Option<@trait_ref>, /* (optional) trait this impl implements */
-              @ty, /* self */
+              @Ty, /* self */
               ~[@method]),
     item_mac(mac),
 }
@@ -1601,7 +1601,7 @@ type foreign_item =
 #[auto_deserialize]
 enum foreign_item_ {
     foreign_item_fn(fn_decl, purity, ~[ty_param]),
-    foreign_item_const(@ty)
+    foreign_item_const(@Ty)
 }
 
 // The data we save and restore about an inlined item or method.  This is not
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index 4c18b6b8eca..6fd84c3317f 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -471,7 +471,7 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
         visit_expr_post: fn@(_e: @expr) {
         },
 
-        visit_ty: fn@(t: @ty) {
+        visit_ty: fn@(t: @Ty) {
             match t.node {
               ty_path(_, id) => vfn(id),
               _ => { /* fall through */ }
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index e07985119ec..0cb8b425c94 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -7,7 +7,7 @@ export file_substr;
 export fss_none;
 export fss_internal;
 export fss_external;
-export codemap;
+export CodeMap;
 export expn_info;
 export expn_info_;
 export expanded_from;
@@ -55,11 +55,11 @@ type filemap =
     @{name: filename, substr: file_substr, src: @~str,
       start_pos: file_pos, mut lines: ~[file_pos]};
 
-type codemap = @{files: DVec<filemap>};
+type CodeMap = @{files: DVec<filemap>};
 
 type loc = {file: filemap, line: uint, col: uint};
 
-fn new_codemap() -> codemap { @{files: DVec()} }
+fn new_codemap() -> CodeMap { @{files: DVec()} }
 
 fn new_filemap_w_substr(+filename: filename, +substr: file_substr,
                         src: @~str,
@@ -77,7 +77,7 @@ fn new_filemap(+filename: filename, src: @~str,
                              start_pos_ch, start_pos_byte);
 }
 
-fn mk_substr_filename(cm: codemap, sp: span) -> ~str
+fn mk_substr_filename(cm: CodeMap, sp: span) -> ~str
 {
     let pos = lookup_char_pos(cm, sp.lo);
     return fmt!("<%s:%u:%u>", pos.file.name, pos.line, pos.col);
@@ -89,7 +89,7 @@ fn next_line(file: filemap, chpos: uint, byte_pos: uint) {
 
 type lookup_fn = pure fn(file_pos) -> uint;
 
-fn lookup_line(map: codemap, pos: uint, lookup: lookup_fn)
+fn lookup_line(map: CodeMap, pos: uint, lookup: lookup_fn)
     -> {fm: filemap, line: uint}
 {
     let len = map.files.len();
@@ -112,22 +112,22 @@ fn lookup_line(map: codemap, pos: uint, lookup: lookup_fn)
     return {fm: f, line: a};
 }
 
-fn lookup_pos(map: codemap, pos: uint, lookup: lookup_fn) -> loc {
+fn lookup_pos(map: CodeMap, pos: uint, lookup: lookup_fn) -> loc {
     let {fm: f, line: a} = lookup_line(map, pos, lookup);
     return {file: f, line: a + 1u, col: pos - lookup(f.lines[a])};
 }
 
-fn lookup_char_pos(map: codemap, pos: uint) -> loc {
+fn lookup_char_pos(map: CodeMap, pos: uint) -> loc {
     pure fn lookup(pos: file_pos) -> uint { return pos.ch; }
     return lookup_pos(map, pos, lookup);
 }
 
-fn lookup_byte_pos(map: codemap, pos: uint) -> loc {
+fn lookup_byte_pos(map: CodeMap, pos: uint) -> loc {
     pure fn lookup(pos: file_pos) -> uint { return pos.byte; }
     return lookup_pos(map, pos, lookup);
 }
 
-fn lookup_char_pos_adj(map: codemap, pos: uint)
+fn lookup_char_pos_adj(map: CodeMap, pos: uint)
     -> {filename: ~str, line: uint, col: uint, file: Option<filemap>}
 {
     let loc = lookup_char_pos(map, pos);
@@ -150,7 +150,7 @@ fn lookup_char_pos_adj(map: codemap, pos: uint)
     }
 }
 
-fn adjust_span(map: codemap, sp: span) -> span {
+fn adjust_span(map: CodeMap, sp: span) -> span {
     pure fn lookup(pos: file_pos) -> uint { return pos.ch; }
     let line = lookup_line(map, sp.lo, lookup);
     match (line.fm.substr) {
@@ -178,14 +178,14 @@ impl span : cmp::Eq {
     pure fn ne(other: &span) -> bool { !self.eq(other) }
 }
 
-fn span_to_str_no_adj(sp: span, cm: codemap) -> ~str {
+fn span_to_str_no_adj(sp: span, cm: CodeMap) -> ~str {
     let lo = lookup_char_pos(cm, sp.lo);
     let hi = lookup_char_pos(cm, sp.hi);
     return fmt!("%s:%u:%u: %u:%u", lo.file.name,
              lo.line, lo.col, hi.line, hi.col)
 }
 
-fn span_to_str(sp: span, cm: codemap) -> ~str {
+fn span_to_str(sp: span, cm: CodeMap) -> ~str {
     let lo = lookup_char_pos_adj(cm, sp.lo);
     let hi = lookup_char_pos_adj(cm, sp.hi);
     return fmt!("%s:%u:%u: %u:%u", lo.filename,
@@ -194,12 +194,12 @@ fn span_to_str(sp: span, cm: codemap) -> ~str {
 
 type file_lines = {file: filemap, lines: ~[uint]};
 
-fn span_to_filename(sp: span, cm: codemap::codemap) -> filename {
+fn span_to_filename(sp: span, cm: codemap::CodeMap) -> filename {
     let lo = lookup_char_pos(cm, sp.lo);
     return /* FIXME (#2543) */ copy lo.file.name;
 }
 
-fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
+fn span_to_lines(sp: span, cm: codemap::CodeMap) -> @file_lines {
     let lo = lookup_char_pos(cm, sp.lo);
     let hi = lookup_char_pos(cm, sp.hi);
     let mut lines = ~[];
@@ -218,7 +218,7 @@ fn get_line(fm: filemap, line: int) -> ~str unsafe {
     str::slice(*fm.src, begin, end)
 }
 
-fn lookup_byte_offset(cm: codemap::codemap, chpos: uint)
+fn lookup_byte_offset(cm: codemap::CodeMap, chpos: uint)
     -> {fm: filemap, pos: uint} {
     pure fn lookup(pos: file_pos) -> uint { return pos.ch; }
     let {fm, line} = lookup_line(cm, chpos, lookup);
@@ -228,20 +228,20 @@ fn lookup_byte_offset(cm: codemap::codemap, chpos: uint)
     {fm: fm, pos: line_offset + col_offset}
 }
 
-fn span_to_snippet(sp: span, cm: codemap::codemap) -> ~str {
+fn span_to_snippet(sp: span, cm: codemap::CodeMap) -> ~str {
     let begin = lookup_byte_offset(cm, sp.lo);
     let end = lookup_byte_offset(cm, sp.hi);
     assert begin.fm.start_pos == end.fm.start_pos;
     return str::slice(*begin.fm.src, begin.pos, end.pos);
 }
 
-fn get_snippet(cm: codemap::codemap, fidx: uint, lo: uint, hi: uint) -> ~str
+fn get_snippet(cm: codemap::CodeMap, fidx: uint, lo: uint, hi: uint) -> ~str
 {
     let fm = cm.files[fidx];
     return str::slice(*fm.src, lo, hi)
 }
 
-fn get_filemap(cm: codemap, filename: ~str) -> filemap {
+fn get_filemap(cm: CodeMap, filename: ~str) -> filemap {
     for cm.files.each |fm| { if fm.name == filename { return *fm; } }
     //XXjdm the following triggers a mismatched type bug
     //      (or expected function, found _|_)
diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs
index 2addb3d9e12..855b0ca3ef5 100644
--- a/src/libsyntax/diagnostic.rs
+++ b/src/libsyntax/diagnostic.rs
@@ -9,7 +9,7 @@ export codemap_span_handler, codemap_handler;
 export ice_msg;
 export expect;
 
-type emitter = fn@(cmsp: Option<(codemap::codemap, span)>,
+type emitter = fn@(cmsp: Option<(codemap::CodeMap, span)>,
                    msg: &str, lvl: level);
 
 
@@ -33,7 +33,7 @@ trait handler {
     fn note(msg: &str);
     fn bug(msg: &str) -> !;
     fn unimpl(msg: &str) -> !;
-    fn emit(cmsp: Option<(codemap::codemap, span)>, msg: &str, lvl: level);
+    fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level);
 }
 
 type handler_t = @{
@@ -43,7 +43,7 @@ type handler_t = @{
 
 type codemap_t = @{
     handler: handler,
-    cm: codemap::codemap
+    cm: codemap::CodeMap
 };
 
 impl codemap_t: span_handler {
@@ -107,7 +107,7 @@ impl handler_t: handler {
         self.fatal(ice_msg(msg));
     }
     fn unimpl(msg: &str) -> ! { self.bug(~"unimplemented " + msg); }
-    fn emit(cmsp: Option<(codemap::codemap, span)>, msg: &str, lvl: level) {
+    fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level) {
         self.emit(cmsp, msg, lvl);
     }
 }
@@ -116,7 +116,7 @@ fn ice_msg(msg: &str) -> ~str {
     fmt!("internal compiler error: %s", msg)
 }
 
-fn mk_span_handler(handler: handler, cm: codemap::codemap) -> span_handler {
+fn mk_span_handler(handler: handler, cm: codemap::CodeMap) -> span_handler {
     @{ handler: handler, cm: cm } as span_handler
 }
 
@@ -125,7 +125,7 @@ fn mk_handler(emitter: Option<emitter>) -> handler {
     let emit = match emitter {
       Some(e) => e,
       None => {
-        let f = fn@(cmsp: Option<(codemap::codemap, span)>,
+        let f = fn@(cmsp: Option<(codemap::CodeMap, span)>,
             msg: &str, t: level) {
             emit(cmsp, msg, t);
         };
@@ -189,8 +189,7 @@ fn print_diagnostic(topic: ~str, lvl: level, msg: &str) {
     io::stderr().write_str(fmt!(" %s\n", msg));
 }
 
-fn emit(cmsp: Option<(codemap::codemap, span)>,
-        msg: &str, lvl: level) {
+fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level) {
     match cmsp {
       Some((cm, sp)) => {
         let sp = codemap::adjust_span(cm,sp);
@@ -206,7 +205,7 @@ fn emit(cmsp: Option<(codemap::codemap, span)>,
     }
 }
 
-fn highlight_lines(cm: codemap::codemap, sp: span,
+fn highlight_lines(cm: codemap::CodeMap, sp: span,
                    lines: @codemap::file_lines) {
 
     let fm = lines.file;
@@ -261,7 +260,7 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
     }
 }
 
-fn print_macro_backtrace(cm: codemap::codemap, sp: span) {
+fn print_macro_backtrace(cm: codemap::CodeMap, sp: span) {
     do option::iter(&sp.expn_info) |ei| {
         let ss = option::map_default(&ei.callie.span, @~"",
                                      |span| @codemap::span_to_str(*span, cm));
diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs
index 452becbe559..7e5e68ffff9 100644
--- a/src/libsyntax/ext/auto_serialize.rs
+++ b/src/libsyntax/ext/auto_serialize.rs
@@ -250,12 +250,12 @@ priv impl ext_ctxt {
     }
 
     fn path_tps(span: span, strs: ~[ast::ident],
-                tps: ~[@ast::ty]) -> @ast::path {
+                tps: ~[@ast::Ty]) -> @ast::path {
         @{span: span, global: false, idents: strs, rp: None, types: tps}
     }
 
     fn ty_path(span: span, strs: ~[ast::ident],
-               tps: ~[@ast::ty]) -> @ast::ty {
+               tps: ~[@ast::Ty]) -> @ast::Ty {
         @{id: self.next_id(),
           node: ast::ty_path(self.path_tps(span, strs, tps), self.next_id()),
           span: span}
@@ -360,7 +360,7 @@ fn mk_impl(
     ty_param: ast::ty_param,
     path: @ast::path,
     tps: ~[ast::ty_param],
-    f: fn(@ast::ty) -> @ast::method
+    f: fn(@ast::Ty) -> @ast::method
 ) -> @ast::item {
     // All the type parameters need to bound to the trait.
     let mut trait_tps = vec::append(
@@ -549,7 +549,7 @@ fn mk_ser_method(
 fn mk_deser_method(
     cx: ext_ctxt,
     span: span,
-    ty: @ast::ty,
+    ty: @ast::Ty,
     deser_body: ast::blk
 ) -> @ast::method {
     let ty_d = @{
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 5894758cd85..5b4cc23ce09 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -1,7 +1,7 @@
 use std::map::HashMap;
 use parse::parser;
 use diagnostic::span_handler;
-use codemap::{codemap, span, expn_info, expanded_from};
+use codemap::{CodeMap, span, expn_info, expanded_from};
 
 // obsolete old-style #macro code:
 //
@@ -124,7 +124,7 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> {
 // when a macro expansion occurs, the resulting nodes have the backtrace()
 // -> expn_info of their expansion context stored into their span.
 trait ext_ctxt {
-    fn codemap() -> codemap;
+    fn codemap() -> CodeMap;
     fn parse_sess() -> parse::parse_sess;
     fn cfg() -> ast::crate_cfg;
     fn print_backtrace();
@@ -156,7 +156,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
                       mut mod_path: ~[ast::ident],
                       mut trace_mac: bool};
     impl ctxt_repr: ext_ctxt {
-        fn codemap() -> codemap { self.parse_sess.cm }
+        fn codemap() -> CodeMap { self.parse_sess.cm }
         fn parse_sess() -> parse::parse_sess { self.parse_sess }
         fn cfg() -> ast::crate_cfg { self.cfg }
         fn print_backtrace() { }
diff --git a/src/libsyntax/ext/pipes.rs b/src/libsyntax/ext/pipes.rs
index ad4984c5558..4d04552bfa1 100644
--- a/src/libsyntax/ext/pipes.rs
+++ b/src/libsyntax/ext/pipes.rs
@@ -37,7 +37,7 @@ use codemap::span;
 use ext::base::ext_ctxt;
 use ast::tt_delim;
 use parse::lexer::{new_tt_reader, reader};
-use parse::parser::{parser, SOURCE_FILE};
+use parse::parser::{Parser, SOURCE_FILE};
 use parse::common::parser_common;
 
 use pipes::parse_proto::proto_parser;
@@ -52,7 +52,7 @@ fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident,
     let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
                                cx.parse_sess().interner, None, tt);
     let rdr = tt_rdr as reader;
-    let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE);
+    let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE);
 
     let proto = rust_parser.parse_proto(cx.str_of(id));
 
diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs
index 4da9992b0dd..f10cbc2a589 100644
--- a/src/libsyntax/ext/pipes/ast_builder.rs
+++ b/src/libsyntax/ext/pipes/ast_builder.rs
@@ -28,17 +28,17 @@ fn empty_span() -> span {
 }
 
 trait append_types {
-    fn add_ty(ty: @ast::ty) -> @ast::path;
-    fn add_tys(+tys: ~[@ast::ty]) -> @ast::path;
+    fn add_ty(ty: @ast::Ty) -> @ast::path;
+    fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path;
 }
 
 impl @ast::path: append_types {
-    fn add_ty(ty: @ast::ty) -> @ast::path {
+    fn add_ty(ty: @ast::Ty) -> @ast::path {
         @{types: vec::append_one(self.types, ty),
           .. *self}
     }
 
-    fn add_tys(+tys: ~[@ast::ty]) -> @ast::path {
+    fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path {
         @{types: vec::append(self.types, tys),
           .. *self}
     }
@@ -47,18 +47,18 @@ impl @ast::path: append_types {
 trait ext_ctxt_ast_builder {
     fn ty_param(id: ast::ident, +bounds: ~[ast::ty_param_bound])
         -> ast::ty_param;
-    fn arg(name: ident, ty: @ast::ty) -> ast::arg;
+    fn arg(name: ident, ty: @ast::Ty) -> ast::arg;
     fn expr_block(e: @ast::expr) -> ast::blk;
-    fn fn_decl(+inputs: ~[ast::arg], output: @ast::ty) -> ast::fn_decl;
+    fn fn_decl(+inputs: ~[ast::arg], output: @ast::Ty) -> ast::fn_decl;
     fn item(name: ident, span: span, +node: ast::item_) -> @ast::item;
     fn item_fn_poly(name: ident,
                     +inputs: ~[ast::arg],
-                    output: @ast::ty,
+                    output: @ast::Ty,
                     +ty_params: ~[ast::ty_param],
                     +body: ast::blk) -> @ast::item;
     fn item_fn(name: ident,
                +inputs: ~[ast::arg],
-               output: @ast::ty,
+               output: @ast::Ty,
                +body: ast::blk) -> @ast::item;
     fn item_enum_poly(name: ident,
                       span: span,
@@ -66,17 +66,17 @@ trait ext_ctxt_ast_builder {
                       +ty_params: ~[ast::ty_param]) -> @ast::item;
     fn item_enum(name: ident, span: span,
                  +enum_definition: ast::enum_def) -> @ast::item;
-    fn variant(name: ident, span: span, +tys: ~[@ast::ty]) -> ast::variant;
+    fn variant(name: ident, span: span, +tys: ~[@ast::Ty]) -> ast::variant;
     fn item_mod(name: ident, span: span, +items: ~[@ast::item]) -> @ast::item;
-    fn ty_path_ast_builder(path: @ast::path) -> @ast::ty;
+    fn ty_path_ast_builder(path: @ast::path) -> @ast::Ty;
     fn item_ty_poly(name: ident,
                     span: span,
-                    ty: @ast::ty,
+                    ty: @ast::Ty,
                     +params: ~[ast::ty_param]) -> @ast::item;
-    fn item_ty(name: ident, span: span, ty: @ast::ty) -> @ast::item;
-    fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty];
-    fn ty_field_imm(name: ident, ty: @ast::ty) -> ast::ty_field;
-    fn ty_rec(+v: ~[ast::ty_field]) -> @ast::ty;
+    fn item_ty(name: ident, span: span, ty: @ast::Ty) -> @ast::item;
+    fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::Ty];
+    fn ty_field_imm(name: ident, ty: @ast::Ty) -> ast::ty_field;
+    fn ty_rec(+v: ~[ast::ty_field]) -> @ast::Ty;
     fn field_imm(name: ident, e: @ast::expr) -> ast::field;
     fn rec(+v: ~[ast::field]) -> @ast::expr;
     fn block(+stmts: ~[@ast::stmt], e: @ast::expr) -> ast::blk;
@@ -84,11 +84,11 @@ trait ext_ctxt_ast_builder {
     fn stmt_expr(e: @ast::expr) -> @ast::stmt;
     fn block_expr(b: ast::blk) -> @ast::expr;
     fn empty_span() -> span;
-    fn ty_option(ty: @ast::ty) -> @ast::ty;
+    fn ty_option(ty: @ast::Ty) -> @ast::Ty;
 }
 
 impl ext_ctxt: ext_ctxt_ast_builder {
-    fn ty_option(ty: @ast::ty) -> @ast::ty {
+    fn ty_option(ty: @ast::Ty) -> @ast::Ty {
         self.ty_path_ast_builder(path(~[self.ident_of(~"Option")],
                                       self.empty_span())
                                  .add_ty(ty))
@@ -146,18 +146,18 @@ impl ext_ctxt: ext_ctxt_ast_builder {
           span: self.empty_span()}
     }
 
-    fn ty_field_imm(name: ident, ty: @ast::ty) -> ast::ty_field {
+    fn ty_field_imm(name: ident, ty: @ast::Ty) -> ast::ty_field {
         {node: {ident: name, mt: { ty: ty, mutbl: ast::m_imm } },
           span: self.empty_span()}
     }
 
-    fn ty_rec(+fields: ~[ast::ty_field]) -> @ast::ty {
+    fn ty_rec(+fields: ~[ast::ty_field]) -> @ast::Ty {
         @{id: self.next_id(),
           node: ast::ty_rec(fields),
           span: self.empty_span()}
     }
 
-    fn ty_infer() -> @ast::ty {
+    fn ty_infer() -> @ast::Ty {
         @{id: self.next_id(),
           node: ast::ty_infer,
           span: self.empty_span()}
@@ -169,7 +169,7 @@ impl ext_ctxt: ext_ctxt_ast_builder {
         {ident: id, id: self.next_id(), bounds: @bounds}
     }
 
-    fn arg(name: ident, ty: @ast::ty) -> ast::arg {
+    fn arg(name: ident, ty: @ast::Ty) -> ast::arg {
         {mode: ast::infer(self.next_id()),
          ty: ty,
          ident: name,
@@ -192,7 +192,7 @@ impl ext_ctxt: ext_ctxt_ast_builder {
     }
 
     fn fn_decl(+inputs: ~[ast::arg],
-               output: @ast::ty) -> ast::fn_decl {
+               output: @ast::Ty) -> ast::fn_decl {
         {inputs: inputs,
          output: output,
          cf: ast::return_val}
@@ -224,7 +224,7 @@ impl ext_ctxt: ext_ctxt_ast_builder {
 
     fn item_fn_poly(name: ident,
                     +inputs: ~[ast::arg],
-                    output: @ast::ty,
+                    output: @ast::Ty,
                     +ty_params: ~[ast::ty_param],
                     +body: ast::blk) -> @ast::item {
         self.item(name,
@@ -237,7 +237,7 @@ impl ext_ctxt: ext_ctxt_ast_builder {
 
     fn item_fn(name: ident,
                +inputs: ~[ast::arg],
-               output: @ast::ty,
+               output: @ast::Ty,
                +body: ast::blk) -> @ast::item {
         self.item_fn_poly(name, inputs, output, ~[], body)
     }
@@ -256,7 +256,7 @@ impl ext_ctxt: ext_ctxt_ast_builder {
 
     fn variant(name: ident,
                span: span,
-               +tys: ~[@ast::ty]) -> ast::variant {
+               +tys: ~[@ast::Ty]) -> ast::variant {
         let args = tys.map(|ty| {ty: *ty, id: self.next_id()});
 
         {node: {name: name,
@@ -278,13 +278,13 @@ impl ext_ctxt: ext_ctxt_ast_builder {
                       items: items}))
     }
 
-    fn ty_path_ast_builder(path: @ast::path) -> @ast::ty {
+    fn ty_path_ast_builder(path: @ast::path) -> @ast::Ty {
         @{id: self.next_id(),
           node: ast::ty_path(path, self.next_id()),
           span: path.span}
     }
 
-    fn ty_nil_ast_builder() -> @ast::ty {
+    fn ty_nil_ast_builder() -> @ast::Ty {
         @{id: self.next_id(),
           node: ast::ty_nil,
           span: self.empty_span()}
@@ -292,16 +292,16 @@ impl ext_ctxt: ext_ctxt_ast_builder {
 
     fn item_ty_poly(name: ident,
                     span: span,
-                    ty: @ast::ty,
+                    ty: @ast::Ty,
                     +params: ~[ast::ty_param]) -> @ast::item {
         self.item(name, span, ast::item_ty(ty, params))
     }
 
-    fn item_ty(name: ident, span: span, ty: @ast::ty) -> @ast::item {
+    fn item_ty(name: ident, span: span, ty: @ast::Ty) -> @ast::item {
         self.item_ty_poly(name, span, ty, ~[])
     }
 
-    fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty] {
+    fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::Ty] {
         ty_params.map(|p| self.ty_path_ast_builder(
             path(~[p.ident], self.empty_span())))
     }
diff --git a/src/libsyntax/ext/pipes/check.rs b/src/libsyntax/ext/pipes/check.rs
index 5fcc00ef012..fcc0c84a4ff 100644
--- a/src/libsyntax/ext/pipes/check.rs
+++ b/src/libsyntax/ext/pipes/check.rs
@@ -38,7 +38,7 @@ impl ext_ctxt: proto::visitor<(), (), ()>  {
         }
     }
 
-    fn visit_message(name: ~str, _span: span, _tys: &[@ast::ty],
+    fn visit_message(name: ~str, _span: span, _tys: &[@ast::Ty],
                      this: state, next: next_state) {
         match next {
           Some({state: next, tys: next_tys}) => {
@@ -68,4 +68,4 @@ impl ext_ctxt: proto::visitor<(), (), ()>  {
           None => ()
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs
index 5c15b616b4a..8f2b92a720c 100644
--- a/src/libsyntax/ext/pipes/parse_proto.rs
+++ b/src/libsyntax/ext/pipes/parse_proto.rs
@@ -10,7 +10,7 @@ trait proto_parser {
     fn parse_state(proto: protocol);
 }
 
-impl parser: proto_parser {
+impl parser::Parser: proto_parser {
     fn parse_proto(id: ~str) -> protocol {
         let proto = protocol(id, self.span);
 
diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs
index 874ea01e9b0..7e1cbe9ad0d 100644
--- a/src/libsyntax/ext/pipes/pipec.rs
+++ b/src/libsyntax/ext/pipes/pipec.rs
@@ -181,7 +181,7 @@ impl message: gen_send {
           }
         }
 
-    fn to_ty(cx: ext_ctxt) -> @ast::ty {
+    fn to_ty(cx: ext_ctxt) -> @ast::Ty {
         cx.ty_path_ast_builder(path(~[cx.ident_of(self.name())], self.span())
           .add_tys(cx.ty_vars(self.get_params())))
     }
@@ -360,7 +360,7 @@ impl protocol: gen_init {
         }}
     }
 
-    fn buffer_ty_path(cx: ext_ctxt) -> @ast::ty {
+    fn buffer_ty_path(cx: ext_ctxt) -> @ast::Ty {
         let mut params: ~[ast::ty_param] = ~[];
         for (copy self.states).each |s| {
             for s.ty_params.each |tp| {
@@ -444,13 +444,13 @@ impl ~[@ast::item]: to_source {
     }
 }
 
-impl @ast::ty: to_source {
+impl @ast::Ty: to_source {
     fn to_source(cx: ext_ctxt) -> ~str {
         ty_to_str(self, cx.parse_sess().interner)
     }
 }
 
-impl ~[@ast::ty]: to_source {
+impl ~[@ast::Ty]: to_source {
     fn to_source(cx: ext_ctxt) -> ~str {
         str::connect(self.map(|i| i.to_source(cx)), ~", ")
     }
diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs
index a501df4c32d..229e55fdfcc 100644
--- a/src/libsyntax/ext/pipes/proto.rs
+++ b/src/libsyntax/ext/pipes/proto.rs
@@ -35,11 +35,11 @@ impl direction {
     }
 }
 
-type next_state = Option<{state: ~str, tys: ~[@ast::ty]}>;
+type next_state = Option<{state: ~str, tys: ~[@ast::Ty]}>;
 
 enum message {
     // name, span, data, current state, next state
-    message(~str, span, ~[@ast::ty], state, next_state)
+    message(~str, span, ~[@ast::Ty], state, next_state)
 }
 
 impl message {
@@ -78,7 +78,7 @@ enum state {
 
 impl state {
     fn add_message(name: ~str, span: span,
-                   +data: ~[@ast::ty], next: next_state) {
+                   +data: ~[@ast::Ty], next: next_state) {
         self.messages.push(message(name, span, data, self,
                                    next));
     }
@@ -92,7 +92,7 @@ impl state {
     }
 
     /// Returns the type that is used for the messages.
-    fn to_ty(cx: ext_ctxt) -> @ast::ty {
+    fn to_ty(cx: ext_ctxt) -> @ast::Ty {
         cx.ty_path_ast_builder
             (path(~[cx.ident_of(self.name)],self.span).add_tys(
                 cx.ty_vars(self.ty_params)))
@@ -200,7 +200,7 @@ impl protocol {
 trait visitor<Tproto, Tstate, Tmessage> {
     fn visit_proto(proto: protocol, st: &[Tstate]) -> Tproto;
     fn visit_state(state: state, m: &[Tmessage]) -> Tstate;
-    fn visit_message(name: ~str, spane: span, tys: &[@ast::ty],
+    fn visit_message(name: ~str, spane: span, tys: &[@ast::Ty],
                      this: state, next: next_state) -> Tmessage;
 }
 
diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs
index ee9602598d1..a83789642cc 100644
--- a/src/libsyntax/ext/qquote.rs
+++ b/src/libsyntax/ext/qquote.rs
@@ -1,7 +1,7 @@
 use ast::{crate, expr_, mac_invoc,
                      mac_aq, mac_var};
 use parse::parser;
-use parse::parser::parse_from_source_str;
+use parse::parser::{Parser, parse_from_source_str};
 use dvec::DVec;
 use parse::token::ident_interner;
 
@@ -24,7 +24,7 @@ struct gather_item {
 type aq_ctxt = @{lo: uint, gather: DVec<gather_item>};
 enum fragment {
     from_expr(@ast::expr),
-    from_ty(@ast::ty)
+    from_ty(@ast::Ty)
 }
 
 fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] {
@@ -68,7 +68,7 @@ impl @ast::expr: qq_helper {
     }
     fn get_fold_fn() -> ~str {~"fold_expr"}
 }
-impl @ast::ty: qq_helper {
+impl @ast::Ty: qq_helper {
     fn span() -> span {self.span}
     fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);}
     fn extract_mac() -> Option<ast::mac_> {
@@ -186,13 +186,13 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
     };
 }
 
-fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod(~[]) }
-fn parse_ty(p: parser) -> @ast::ty { p.parse_ty(false) }
-fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt(~[]) }
-fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() }
-fn parse_pat(p: parser) -> @ast::pat { p.parse_pat(true) }
+fn parse_crate(p: Parser) -> @ast::crate { p.parse_crate_mod(~[]) }
+fn parse_ty(p: Parser) -> @ast::Ty { p.parse_ty(false) }
+fn parse_stmt(p: Parser) -> @ast::stmt { p.parse_stmt(~[]) }
+fn parse_expr(p: Parser) -> @ast::expr { p.parse_expr() }
+fn parse_pat(p: Parser) -> @ast::pat { p.parse_pat(true) }
 
-fn parse_item(p: parser) -> @ast::item {
+fn parse_item(p: Parser) -> @ast::item {
     match p.parse_item(~[]) {
       Some(item) => item,
       None       => fail ~"parse_item: parsing an item failed"
@@ -200,7 +200,7 @@ fn parse_item(p: parser) -> @ast::item {
 }
 
 fn finish<T: qq_helper>
-    (ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: parser) -> T)
+    (ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: Parser) -> T)
     -> @ast::expr
 {
     let cm = ecx.codemap();
@@ -309,7 +309,7 @@ fn fold_crate(f: ast_fold, &&n: @ast::crate) -> @ast::crate {
     @f.fold_crate(*n)
 }
 fn fold_expr(f: ast_fold, &&n: @ast::expr) -> @ast::expr {f.fold_expr(n)}
-fn fold_ty(f: ast_fold, &&n: @ast::ty) -> @ast::ty {f.fold_ty(n)}
+fn fold_ty(f: ast_fold, &&n: @ast::Ty) -> @ast::Ty {f.fold_ty(n)}
 fn fold_item(f: ast_fold, &&n: @ast::item) -> @ast::item {
     f.fold_item(n).get() //HACK: we know we don't drop items
 }
diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs
index e16e1c55349..bec29c9a835 100644
--- a/src/libsyntax/ext/simplext.rs
+++ b/src/libsyntax/ext/simplext.rs
@@ -6,7 +6,7 @@ use base::*;
 
 use fold::*;
 use ast_util::respan;
-use ast::{ident, path, ty, blk_, expr, expr_path,
+use ast::{ident, path, Ty, blk_, expr, expr_path,
              expr_vec, expr_mac, mac_invoc, node_id, expr_index};
 
 export add_new_extension;
@@ -29,7 +29,7 @@ enum matchable {
     match_expr(@expr),
     match_path(@path),
     match_ident(ast::spanned<ident>),
-    match_ty(@ty),
+    match_ty(@Ty),
     match_block(ast::blk),
     match_exact, /* don't bind anything, just verify the AST traversal */
 }
diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs
index c2d4de1b423..0c7d408db7c 100644
--- a/src/libsyntax/ext/trace_macros.rs
+++ b/src/libsyntax/ext/trace_macros.rs
@@ -2,7 +2,7 @@ use codemap::span;
 use ext::base::ext_ctxt;
 use ast::tt_delim;
 use parse::lexer::{new_tt_reader, reader};
-use parse::parser::{parser, SOURCE_FILE};
+use parse::parser::{Parser, SOURCE_FILE};
 use parse::common::parser_common;
 
 fn expand_trace_macros(cx: ext_ctxt, sp: span,
@@ -13,7 +13,7 @@ fn expand_trace_macros(cx: ext_ctxt, sp: span,
     let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
                                cx.parse_sess().interner, None, tt);
     let rdr = tt_rdr as reader;
-    let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE);
+    let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE);
 
     let arg = cx.str_of(rust_parser.parse_ident());
     match arg {
@@ -21,7 +21,7 @@ fn expand_trace_macros(cx: ext_ctxt, sp: span,
       ~"false" => cx.set_trace_macros(false),
       _ => cx.span_fatal(sp, ~"trace_macros! only accepts `true` or `false`")
     }
-    let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE);
+    let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE);
     let result = rust_parser.parse_expr();
     base::mr_expr(result)
 }
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 0b2070c8c86..17122b85fb3 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -1,9 +1,9 @@
 // Earley-like parser for macros.
 use parse::token;
-use parse::token::{token, EOF, to_str, nonterminal};
+use parse::token::{Token, EOF, to_str, nonterminal};
 use parse::lexer::*; //resolve bug?
 //import parse::lexer::{reader, tt_reader, tt_reader_as_reader};
-use parse::parser::{parser,SOURCE_FILE};
+use parse::parser::{Parser, SOURCE_FILE};
 //import parse::common::parser_common;
 use parse::common::*; //resolve bug?
 use parse::parse_sess;
@@ -97,7 +97,7 @@ fn is_some(&&mpu: matcher_pos_up) -> bool {
 
 type matcher_pos = ~{
     elts: ~[ast::matcher], // maybe should be /&? Need to understand regions.
-    sep: Option<token>,
+    sep: Option<Token>,
     mut idx: uint,
     mut up: matcher_pos_up, // mutable for swapping only
     matches: ~[DVec<@named_match>],
@@ -122,7 +122,7 @@ fn count_names(ms: &[matcher]) -> uint {
 }
 
 #[allow(non_implicitly_copyable_typarams)]
-fn initial_matcher_pos(ms: ~[matcher], sep: Option<token>, lo: uint)
+fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: uint)
     -> matcher_pos {
     let mut match_idx_hi = 0u;
     for ms.each() |elt| {
@@ -354,7 +354,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
                 }
                 rdr.next_token();
             } else /* bb_eis.len() == 1 */ {
-                let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE);
+                let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE);
 
                 let ei = bb_eis.pop();
                 match ei.elts[ei.idx].node {
@@ -381,7 +381,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
     }
 }
 
-fn parse_nt(p: parser, name: ~str) -> nonterminal {
+fn parse_nt(p: Parser, name: ~str) -> nonterminal {
     match name {
       ~"item" => match p.parse_item(~[]) {
         Some(i) => token::nt_item(i),
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 52369ad7207..31bc375a76d 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -4,7 +4,7 @@ use ast::{ident, matcher_, matcher, match_tok,
              match_nonterminal, match_seq, tt_delim};
 use parse::lexer::{new_tt_reader, reader};
 use parse::token::{FAT_ARROW, SEMI, LBRACE, RBRACE, nt_matchers, nt_tt};
-use parse::parser::{parser, SOURCE_FILE};
+use parse::parser::{Parser, SOURCE_FILE};
 use macro_parser::{parse, parse_or_else, success, failure, named_match,
                       matched_seq, matched_nonterminal, error};
 use std::map::HashMap;
@@ -86,7 +86,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
                     // rhs has holes ( `$id` and `$(...)` that need filled)
                     let trncbr = new_tt_reader(s_d, itr, Some(named_matches),
                                                ~[rhs]);
-                    let p = parser(cx.parse_sess(), cx.cfg(),
+                    let p = Parser(cx.parse_sess(), cx.cfg(),
                                    trncbr as reader, SOURCE_FILE);
                     let e = p.parse_expr();
                     return mr_expr(e);
@@ -111,4 +111,4 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
         name: *cx.parse_sess().interner.get(name),
         ext: expr_tt({expander: exp, span: Some(sp)})
     });
-}
\ No newline at end of file
+}
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index a8a41cca6cb..238f9db6ac5 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -2,8 +2,7 @@ use diagnostic::span_handler;
 use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident};
 use macro_parser::{named_match, matched_seq, matched_nonterminal};
 use codemap::span;
-use parse::token::{EOF, INTERPOLATED, IDENT, token, nt_ident,
-                      ident_interner};
+use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner};
 use std::map::HashMap;
 
 export tt_reader,  new_tt_reader, dup_tt_reader, tt_next_token;
@@ -19,7 +18,7 @@ type tt_frame = @{
     readme: ~[ast::token_tree],
     mut idx: uint,
     dotdotdoted: bool,
-    sep: Option<token>,
+    sep: Option<Token>,
     up: tt_frame_up,
 };
 
@@ -32,7 +31,7 @@ type tt_reader = @{
     mut repeat_idx: ~[uint],
     mut repeat_len: ~[uint],
     /* cached: */
-    mut cur_tok: token,
+    mut cur_tok: Token,
     mut cur_span: span
 };
 
@@ -134,7 +133,7 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis {
 }
 
 
-fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
+fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} {
     let ret_val = { tok: r.cur_tok, sp: r.cur_span };
     while r.cur.idx >= r.cur.readme.len() {
         /* done with this set; pop or repeat? */
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 68d9cd80430..564debefa25 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -33,7 +33,7 @@ trait ast_fold {
     fn fold_pat(&&v: @pat) -> @pat;
     fn fold_decl(&&v: @decl) -> @decl;
     fn fold_expr(&&v: @expr) -> @expr;
-    fn fold_ty(&&v: @ty) -> @ty;
+    fn fold_ty(&&v: @Ty) -> @Ty;
     fn fold_mod(_mod) -> _mod;
     fn fold_foreign_mod(foreign_mod) -> foreign_mod;
     fn fold_variant(variant) -> variant;
@@ -728,7 +728,7 @@ impl ast_fold_precursor: ast_fold {
               node: n,
               span: self.new_span(s)};
     }
-    fn fold_ty(&&x: @ty) -> @ty {
+    fn fold_ty(&&x: @Ty) -> @Ty {
         let (n, s) = self.fold_ty(x.node, x.span, self as ast_fold);
         return @{id: self.new_id(x.id), node: n, span: self.new_span(s)};
     }
diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs
index 2c04b2a1419..e38ee7ff037 100644
--- a/src/libsyntax/parse.rs
+++ b/src/libsyntax/parse.rs
@@ -12,7 +12,7 @@ export parse_expr_from_source_str, parse_item_from_source_str;
 export parse_stmt_from_source_str;
 export parse_from_source_str;
 
-use parser::parser;
+use parser::Parser;
 use attr::parser_attr;
 use common::parser_common;
 use ast::node_id;
@@ -22,7 +22,7 @@ use lexer::{reader, string_reader};
 use parse::token::{ident_interner, mk_ident_interner};
 
 type parse_sess = @{
-    cm: codemap::codemap,
+    cm: codemap::CodeMap,
     mut next_id: node_id,
     span_diagnostic: span_handler,
     interner: @ident_interner,
@@ -40,7 +40,7 @@ fn new_parse_sess(demitter: Option<emitter>) -> parse_sess {
              mut chpos: 0u, mut byte_pos: 0u};
 }
 
-fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::codemap)
+fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::CodeMap)
     -> parse_sess {
     return @{cm: cm,
              mut next_id: 1,
@@ -142,7 +142,7 @@ fn parse_stmt_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
     return r;
 }
 
-fn parse_from_source_str<T>(f: fn (p: parser) -> T,
+fn parse_from_source_str<T>(f: fn (p: Parser) -> T,
                             name: ~str, ss: codemap::file_substr,
                             source: @~str, cfg: ast::crate_cfg,
                             sess: parse_sess)
@@ -170,19 +170,19 @@ fn next_node_id(sess: parse_sess) -> node_id {
 
 fn new_parser_etc_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
                                   +name: ~str, +ss: codemap::file_substr,
-                                  source: @~str) -> (parser, string_reader) {
+                                  source: @~str) -> (Parser, string_reader) {
     let ftype = parser::SOURCE_FILE;
     let filemap = codemap::new_filemap_w_substr
         (name, ss, source, sess.chpos, sess.byte_pos);
     sess.cm.files.push(filemap);
     let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
                                         sess.interner);
-    return (parser(sess, cfg, srdr as reader, ftype), srdr);
+    return (Parser(sess, cfg, srdr as reader, ftype), srdr);
 }
 
 fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
                               +name: ~str, +ss: codemap::file_substr,
-                              source: @~str) -> parser {
+                              source: @~str) -> Parser {
     let (p, _) = new_parser_etc_from_source_str(sess, cfg, name, ss, source);
     move p
 }
@@ -190,7 +190,7 @@ fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
 
 fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg,
                             path: &Path, ftype: parser::file_type) ->
-   (parser, string_reader) {
+   (Parser, string_reader) {
     let res = io::read_whole_file_str(path);
     match res {
       result::Ok(_) => { /* Continue. */ }
@@ -202,18 +202,18 @@ fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg,
     sess.cm.files.push(filemap);
     let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
                                         sess.interner);
-    return (parser(sess, cfg, srdr as reader, ftype), srdr);
+    return (Parser(sess, cfg, srdr as reader, ftype), srdr);
 }
 
 fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: &Path,
-                        ftype: parser::file_type) -> parser {
+                        ftype: parser::file_type) -> Parser {
     let (p, _) = new_parser_etc_from_file(sess, cfg, path, ftype);
     move p
 }
 
 fn new_parser_from_tt(sess: parse_sess, cfg: ast::crate_cfg,
-                      tt: ~[ast::token_tree]) -> parser {
+                      tt: ~[ast::token_tree]) -> Parser {
     let trdr = lexer::new_tt_reader(sess.span_diagnostic, sess.interner,
                                     None, tt);
-    return parser(sess, cfg, trdr as reader, parser::SOURCE_FILE)
+    return Parser(sess, cfg, trdr as reader, parser::SOURCE_FILE)
 }
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 9be4909814b..42101a431d6 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -23,7 +23,7 @@ trait parser_attr {
     fn parse_optional_meta() -> ~[@ast::meta_item];
 }
 
-impl parser: parser_attr {
+impl Parser: parser_attr {
 
     fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
         -> attr_or_ext
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index c8c30ee7fa9..50c22c08f4f 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -1,63 +1,63 @@
 use std::map::{HashMap};
 use ast_util::spanned;
-use parser::parser;
+use parser::Parser;
 use lexer::reader;
 
 type seq_sep = {
-    sep: Option<token::token>,
+    sep: Option<token::Token>,
     trailing_sep_allowed: bool
 };
 
-fn seq_sep_trailing_disallowed(t: token::token) -> seq_sep {
+fn seq_sep_trailing_disallowed(t: token::Token) -> seq_sep {
     return {sep: option::Some(t), trailing_sep_allowed: false};
 }
-fn seq_sep_trailing_allowed(t: token::token) -> seq_sep {
+fn seq_sep_trailing_allowed(t: token::Token) -> seq_sep {
     return {sep: option::Some(t), trailing_sep_allowed: true};
 }
 fn seq_sep_none() -> seq_sep {
     return {sep: option::None, trailing_sep_allowed: false};
 }
 
-fn token_to_str(reader: reader, ++token: token::token) -> ~str {
+fn token_to_str(reader: reader, ++token: token::Token) -> ~str {
     token::to_str(reader.interner(), token)
 }
 
 trait parser_common {
-    fn unexpected_last(t: token::token) -> !;
+    fn unexpected_last(t: token::Token) -> !;
     fn unexpected() -> !;
-    fn expect(t: token::token);
+    fn expect(t: token::Token);
     fn parse_ident() -> ast::ident;
     fn parse_path_list_ident() -> ast::path_list_ident;
     fn parse_value_ident() -> ast::ident;
-    fn eat(tok: token::token) -> bool;
+    fn eat(tok: token::Token) -> bool;
     // A sanity check that the word we are asking for is a known keyword
     fn require_keyword(word: ~str);
-    fn token_is_keyword(word: ~str, ++tok: token::token) -> bool;
+    fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool;
     fn is_keyword(word: ~str) -> bool;
-    fn is_any_keyword(tok: token::token) -> bool;
+    fn is_any_keyword(tok: token::Token) -> bool;
     fn eat_keyword(word: ~str) -> bool;
     fn expect_keyword(word: ~str);
     fn expect_gt();
-    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::token>,
-                                       f: fn(parser) -> T) -> ~[T];
-    fn parse_seq_to_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> ~[T];
-    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> spanned<~[T]>;
-    fn parse_seq_to_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                 f: fn(parser) -> T) -> ~[T];
-    fn parse_seq_to_before_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                        f: fn(parser) -> T) -> ~[T];
-    fn parse_unspanned_seq<T: Copy>(bra: token::token,
-                                    ket: token::token,
+    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::Token>,
+                                       f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq_to_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> spanned<~[T]>;
+    fn parse_seq_to_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                 f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq_to_before_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                        f: fn(Parser) -> T) -> ~[T];
+    fn parse_unspanned_seq<T: Copy>(bra: token::Token,
+                                    ket: token::Token,
                                     sep: seq_sep,
-                                    f: fn(parser) -> T) -> ~[T];
-    fn parse_seq<T: Copy>(bra: token::token, ket: token::token, sep: seq_sep,
-                          f: fn(parser) -> T) -> spanned<~[T]>;
+                                    f: fn(Parser) -> T) -> ~[T];
+    fn parse_seq<T: Copy>(bra: token::Token, ket: token::Token, sep: seq_sep,
+                          f: fn(Parser) -> T) -> spanned<~[T]>;
 }
 
-impl parser: parser_common {
-    fn unexpected_last(t: token::token) -> ! {
+impl Parser: parser_common {
+    fn unexpected_last(t: token::Token) -> ! {
         self.span_fatal(
             copy self.last_span,
             ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`");
@@ -68,7 +68,7 @@ impl parser: parser_common {
                    + token_to_str(self.reader, self.token) + ~"`");
     }
 
-    fn expect(t: token::token) {
+    fn expect(t: token::Token) {
         if self.token == t {
             self.bump();
         } else {
@@ -104,7 +104,7 @@ impl parser: parser_common {
         return self.parse_ident();
     }
 
-    fn eat(tok: token::token) -> bool {
+    fn eat(tok: token::Token) -> bool {
         return if self.token == tok { self.bump(); true } else { false };
     }
 
@@ -117,14 +117,14 @@ impl parser: parser_common {
         }
     }
 
-    fn token_is_word(word: ~str, ++tok: token::token) -> bool {
+    fn token_is_word(word: ~str, ++tok: token::Token) -> bool {
         match tok {
           token::IDENT(sid, false) => { *self.id_to_str(sid) == word }
           _ => { false }
         }
     }
 
-    fn token_is_keyword(word: ~str, ++tok: token::token) -> bool {
+    fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool {
         self.require_keyword(word);
         self.token_is_word(word, tok)
     }
@@ -133,7 +133,7 @@ impl parser: parser_common {
         self.token_is_keyword(word, self.token)
     }
 
-    fn is_any_keyword(tok: token::token) -> bool {
+    fn is_any_keyword(tok: token::Token) -> bool {
         match tok {
           token::IDENT(sid, false) => {
             self.keywords.contains_key_ref(self.id_to_str(sid))
@@ -216,8 +216,8 @@ impl parser: parser_common {
         }
     }
 
-    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::token>,
-                                       f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::Token>,
+                                       f: fn(Parser) -> T) -> ~[T] {
         let mut first = true;
         let mut v = ~[];
         while self.token != token::GT
@@ -235,16 +235,16 @@ impl parser: parser_common {
         return v;
     }
 
-    fn parse_seq_to_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> ~[T] {
         let v = self.parse_seq_to_before_gt(sep, f);
         self.expect_gt();
 
         return v;
     }
 
-    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::token>,
-                                f: fn(parser) -> T) -> spanned<~[T]> {
+    fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
+                                f: fn(Parser) -> T) -> spanned<~[T]> {
         let lo = self.span.lo;
         self.expect(token::LT);
         let result = self.parse_seq_to_before_gt::<T>(sep, f);
@@ -253,16 +253,16 @@ impl parser: parser_common {
         return spanned(lo, hi, result);
     }
 
-    fn parse_seq_to_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                 f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                 f: fn(Parser) -> T) -> ~[T] {
         let val = self.parse_seq_to_before_end(ket, sep, f);
         self.bump();
         return val;
     }
 
 
-    fn parse_seq_to_before_end<T: Copy>(ket: token::token, sep: seq_sep,
-                                        f: fn(parser) -> T) -> ~[T] {
+    fn parse_seq_to_before_end<T: Copy>(ket: token::Token, sep: seq_sep,
+                                        f: fn(Parser) -> T) -> ~[T] {
         let mut first: bool = true;
         let mut v: ~[T] = ~[];
         while self.token != ket {
@@ -279,10 +279,10 @@ impl parser: parser_common {
         return v;
     }
 
-    fn parse_unspanned_seq<T: Copy>(bra: token::token,
-                                    ket: token::token,
+    fn parse_unspanned_seq<T: Copy>(bra: token::Token,
+                                    ket: token::Token,
                                     sep: seq_sep,
-                                    f: fn(parser) -> T) -> ~[T] {
+                                    f: fn(Parser) -> T) -> ~[T] {
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
         self.bump();
@@ -291,8 +291,8 @@ impl parser: parser_common {
 
     // NB: Do not use this function unless you actually plan to place the
     // spanned list in the AST.
-    fn parse_seq<T: Copy>(bra: token::token, ket: token::token, sep: seq_sep,
-                          f: fn(parser) -> T) -> spanned<~[T]> {
+    fn parse_seq<T: Copy>(bra: token::Token, ket: token::Token, sep: seq_sep,
+                          f: fn(Parser) -> T) -> spanned<~[T]> {
         let lo = self.span.lo;
         self.expect(bra);
         let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
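
The parser_common methods renamed here all build on one loop: parse an item
with `f`, consume the separator between items, optionally tolerate a trailing
separator, and stop before the closing token. A small sketch of that loop in
current Rust follows (not part of the patch); the toy Token and Parser stand
in for token::Token and the real Parser.

    // Sketch only: seq_sep plus parse_seq_to_before_end, reduced to essentials.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Token { Ident(char), Comma, Gt, Eof }

    struct SeqSep {
        sep: Option<Token>,
        trailing_sep_allowed: bool,
    }

    struct Parser { tokens: Vec<Token>, pos: usize }

    impl Parser {
        fn token(&self) -> Token { self.tokens[self.pos] }
        fn bump(&mut self) { self.pos += 1; }
        fn eat(&mut self, t: Token) -> bool {
            if self.token() == t { self.bump(); true } else { false }
        }

        // Parse `f` repeatedly, separated by `sep.sep`, stopping before `ket`.
        fn parse_seq_to_before_end<T>(
            &mut self,
            ket: Token,
            sep: SeqSep,
            f: impl Fn(&mut Parser) -> T,
        ) -> Vec<T> {
            let mut first = true;
            let mut v = Vec::new();
            while self.token() != ket {
                if let Some(s) = sep.sep {
                    if first { first = false } else { assert!(self.eat(s)) }
                }
                // Optional trailing separator: `a, b,` right before the ket.
                if sep.trailing_sep_allowed && self.token() == ket { break }
                v.push(f(self));
            }
            v
        }
    }

    fn main() {
        let mut p = Parser {
            tokens: vec![Token::Ident('a'), Token::Comma, Token::Ident('b'),
                         Token::Comma, Token::Gt, Token::Eof],
            pos: 0,
        };
        let sep = SeqSep { sep: Some(Token::Comma), trailing_sep_allowed: true };
        let idents = p.parse_seq_to_before_end(Token::Gt, sep, |p| {
            let t = p.token();
            p.bump();
            t
        });
        assert_eq!(idents, vec![Token::Ident('a'), Token::Ident('b')]);
        assert_eq!(p.token(), Token::Gt); // stopped before the closing `>`
    }
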
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index c9106028491..56c9d4de9f3 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -1,4 +1,4 @@
-use parser::{parser, SOURCE_FILE};
+use parser::{Parser, SOURCE_FILE};
 use attr::parser_attr;
 
 export eval_crate_directives_to_mod;
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 06fcc1cf958..8f57d733eb5 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -10,11 +10,11 @@ export string_reader_as_reader, tt_reader_as_reader;
 
 trait reader {
     fn is_eof() -> bool;
-    fn next_token() -> {tok: token::token, sp: span};
+    fn next_token() -> {tok: token::Token, sp: span};
     fn fatal(~str) -> !;
     fn span_diag() -> span_handler;
     pure fn interner() -> @token::ident_interner;
-    fn peek() -> {tok: token::token, sp: span};
+    fn peek() -> {tok: token::Token, sp: span};
     fn dup() -> reader;
 }
 
@@ -28,7 +28,7 @@ type string_reader = @{
     filemap: codemap::filemap,
     interner: @token::ident_interner,
     /* cached: */
-    mut peek_tok: token::token,
+    mut peek_tok: token::Token,
     mut peek_span: span
 };
 
@@ -69,7 +69,7 @@ fn dup_string_reader(&&r: string_reader) -> string_reader {
 
 impl string_reader: reader {
     fn is_eof() -> bool { is_eof(self) }
-    fn next_token() -> {tok: token::token, sp: span} {
+    fn next_token() -> {tok: token::Token, sp: span} {
         let ret_val = {tok: self.peek_tok, sp: self.peek_span};
         string_advance_token(self);
         return ret_val;
@@ -79,7 +79,7 @@ impl string_reader: reader {
     }
     fn span_diag() -> span_handler { self.span_diagnostic }
     pure fn interner() -> @token::ident_interner { self.interner }
-    fn peek() -> {tok: token::token, sp: span} {
+    fn peek() -> {tok: token::Token, sp: span} {
         {tok: self.peek_tok, sp: self.peek_span}
     }
     fn dup() -> reader { dup_string_reader(self) as reader }
@@ -87,7 +87,7 @@ impl string_reader: reader {
 
 impl tt_reader: reader {
     fn is_eof() -> bool { self.cur_tok == token::EOF }
-    fn next_token() -> {tok: token::token, sp: span} {
+    fn next_token() -> {tok: token::Token, sp: span} {
         /* weird resolve bug: if the following `if`, or any of its
         statements are removed, we get resolution errors */
         if false {
@@ -101,7 +101,7 @@ impl tt_reader: reader {
     }
     fn span_diag() -> span_handler { self.sp_diag }
     pure fn interner() -> @token::ident_interner { self.interner }
-    fn peek() -> {tok: token::token, sp: span} {
+    fn peek() -> {tok: token::Token, sp: span} {
         { tok: self.cur_tok, sp: self.cur_span }
     }
     fn dup() -> reader { dup_tt_reader(self) as reader }
@@ -196,14 +196,14 @@ fn is_bin_digit(c: char) -> bool { return c == '0' || c == '1'; }
 
 // might return a sugared-doc-attr
 fn consume_whitespace_and_comments(rdr: string_reader)
-                                -> Option<{tok: token::token, sp: span}> {
+                                -> Option<{tok: token::Token, sp: span}> {
     while is_whitespace(rdr.curr) { bump(rdr); }
     return consume_any_line_comment(rdr);
 }
 
 // might return a sugared-doc-attr
 fn consume_any_line_comment(rdr: string_reader)
-                                -> Option<{tok: token::token, sp: span}> {
+                                -> Option<{tok: token::Token, sp: span}> {
     if rdr.curr == '/' {
         match nextch(rdr) {
           '/' => {
@@ -246,7 +246,7 @@ fn consume_any_line_comment(rdr: string_reader)
 
 // might return a sugared-doc-attr
 fn consume_block_comment(rdr: string_reader)
-                                -> Option<{tok: token::token, sp: span}> {
+                                -> Option<{tok: token::Token, sp: span}> {
 
     // block comments starting with "/**" or "/*!" are doc-comments
     if rdr.curr == '*' || rdr.curr == '!' {
@@ -317,7 +317,7 @@ fn scan_digits(rdr: string_reader, radix: uint) -> ~str {
     };
 }
 
-fn scan_number(c: char, rdr: string_reader) -> token::token {
+fn scan_number(c: char, rdr: string_reader) -> token::Token {
     let mut num_str, base = 10u, c = c, n = nextch(rdr);
     if c == '0' && n == 'x' {
         bump(rdr);
@@ -435,7 +435,7 @@ fn scan_numeric_escape(rdr: string_reader, n_hex_digits: uint) -> char {
     return accum_int as char;
 }
 
-fn next_token_inner(rdr: string_reader) -> token::token {
+fn next_token_inner(rdr: string_reader) -> token::Token {
     let mut accum_str = ~"";
     let mut c = rdr.curr;
     if (c >= 'a' && c <= 'z')
@@ -460,7 +460,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
     if is_dec_digit(c) {
         return scan_number(c, rdr);
     }
-    fn binop(rdr: string_reader, op: token::binop) -> token::token {
+    fn binop(rdr: string_reader, op: token::binop) -> token::Token {
         bump(rdr);
         if rdr.curr == '=' {
             bump(rdr);
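
The string_reader keeps exactly one token of state: peek_tok/peek_span hold
the token that next_token() will hand out, and scanning refills that cache.
The sketch below (not part of the patch) shows the same peek/next_token
contract in modern Rust; the character-level lexing is a toy assumption,
only the one-token cache mirrors the real reader.

    // Sketch only: a cached-peek reader with the binop/'=' lookahead idea.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Token { Plus, PlusEq, Eof }

    struct StringReader {
        chars: Vec<char>,
        pos: usize,
        peek_tok: Token, // cached: always one token ahead of `pos`
    }

    impl StringReader {
        fn new(src: &str) -> StringReader {
            let mut r = StringReader {
                chars: src.chars().collect(),
                pos: 0,
                peek_tok: Token::Eof,
            };
            r.peek_tok = r.scan(); // prime the cache, like new_string_reader
            r
        }

        // Rough analogue of next_token_inner plus the binop helper above.
        fn scan(&mut self) -> Token {
            match self.chars.get(self.pos).copied() {
                Some('+') => {
                    self.pos += 1;
                    if self.chars.get(self.pos) == Some(&'=') {
                        self.pos += 1;
                        Token::PlusEq
                    } else {
                        Token::Plus
                    }
                }
                _ => Token::Eof,
            }
        }

        fn peek(&self) -> Token { self.peek_tok }

        fn next_token(&mut self) -> Token {
            let ret = self.peek_tok;     // hand out the cached token
            self.peek_tok = self.scan(); // refill the cache
            ret
        }
    }

    fn main() {
        let mut r = StringReader::new("++=");
        assert_eq!(r.peek(), Token::Plus);
        assert_eq!(r.next_token(), Token::Plus);
        assert_eq!(r.next_token(), Token::PlusEq);
        assert_eq!(r.next_token(), Token::Eof);
    }
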
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index 828d498ca3c..c0e01fb1944 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -10,7 +10,7 @@ removed.
 use codemap::span;
 use ast::{expr, expr_lit, lit_nil};
 use ast_util::{respan};
-use token::token;
+use token::Token;
 
 /// The specific types of unsupported syntax
 pub enum ObsoleteSyntax {
@@ -47,7 +47,7 @@ pub trait ObsoleteReporter {
     fn obsolete_expr(sp: span, kind: ObsoleteSyntax) -> @expr;
 }
 
-impl parser : ObsoleteReporter {
+impl Parser : ObsoleteReporter {
     /// Reports an obsolete syntax non-fatal error.
     fn obsolete(sp: span, kind: ObsoleteSyntax) {
         let (kind_str, desc) = match kind {
@@ -121,7 +121,7 @@ impl parser : ObsoleteReporter {
         }
     }
 
-    fn token_is_obsolete_ident(ident: &str, token: token) -> bool {
+    fn token_is_obsolete_ident(ident: &str, token: Token) -> bool {
         match token {
             token::IDENT(copy sid, _) => {
                 str::eq_slice(*self.id_to_str(sid), ident)
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 973822ddff9..e29620a7e79 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -6,7 +6,7 @@ use std::map::HashMap;
 use token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
             INTERPOLATED, special_idents};
 use codemap::{span,fss_none};
-use util::interner::interner;
+use util::interner::Interner;
 use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
 use lexer::reader;
 use prec::{as_prec, token_to_binop};
@@ -58,7 +58,7 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
              stmt_semi, struct_def, struct_field, struct_variant_kind,
              subtract, sty_box, sty_by_ref, sty_region, sty_static, sty_uniq,
              sty_value, token_tree, trait_method, trait_ref, tt_delim, tt_seq,
-             tt_tok, tt_nonterminal, tuple_variant_kind, ty, ty_, ty_bot,
+             tt_tok, tt_nonterminal, tuple_variant_kind, Ty, ty_, ty_bot,
              ty_box, ty_field, ty_fn, ty_infer, ty_mac, ty_method, ty_nil,
              ty_param, ty_param_bound, ty_path, ty_ptr, ty_rec, ty_rptr,
              ty_tup, ty_u32, ty_uniq, ty_vec, ty_fixed_length, type_value_ns,
@@ -71,7 +71,7 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
              expr_vstore_uniq};
 
 export file_type;
-export parser;
+export Parser;
 export CRATE_FILE;
 export SOURCE_FILE;
 
@@ -190,14 +190,14 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>)
 
 /* ident is handled by common.rs */
 
-fn parser(sess: parse_sess, cfg: ast::crate_cfg,
-          +rdr: reader, ftype: file_type) -> parser {
+fn Parser(sess: parse_sess, cfg: ast::crate_cfg,
+          +rdr: reader, ftype: file_type) -> Parser {
 
     let tok0 = rdr.next_token();
     let span0 = tok0.sp;
     let interner = rdr.interner();
 
-    parser {
+    Parser {
         reader: move rdr,
         interner: move interner,
         sess: sess,
@@ -223,14 +223,14 @@ fn parser(sess: parse_sess, cfg: ast::crate_cfg,
     }
 }
 
-struct parser {
+struct Parser {
     sess: parse_sess,
     cfg: crate_cfg,
     file_type: file_type,
-    mut token: token::token,
+    mut token: token::Token,
     mut span: span,
     mut last_span: span,
-    mut buffer: [mut {tok: token::token, sp: span}]/4,
+    mut buffer: [mut {tok: token::Token, sp: span}]/4,
     mut buffer_start: int,
     mut buffer_end: int,
     mut restriction: restriction,
@@ -247,7 +247,7 @@ struct parser {
     drop {} /* do not copy the parser; its state is tied to outside state */
 }
 
-impl parser {
+impl Parser {
     fn bump() {
         self.last_span = self.span;
         let next = if self.buffer_start == self.buffer_end {
@@ -260,7 +260,7 @@ impl parser {
         self.token = next.tok;
         self.span = next.sp;
     }
-    fn swap(next: token::token, lo: uint, hi: uint) {
+    fn swap(next: token::Token, lo: uint, hi: uint) {
         self.token = next;
         self.span = mk_sp(lo, hi);
     }
@@ -270,7 +270,7 @@ impl parser {
         }
         return (4 - self.buffer_start) + self.buffer_end;
     }
-    fn look_ahead(distance: uint) -> token::token {
+    fn look_ahead(distance: uint) -> token::Token {
         let dist = distance as int;
         while self.buffer_length() < dist {
             self.buffer[self.buffer_end] = self.reader.next_token();
@@ -411,7 +411,7 @@ impl parser {
         });
     }
 
-    fn parse_ret_ty() -> (ret_style, @ty) {
+    fn parse_ret_ty() -> (ret_style, @Ty) {
         return if self.eat(token::RARROW) {
             let lo = self.span.lo;
             if self.eat(token::NOT) {
@@ -472,7 +472,7 @@ impl parser {
         self.region_from_name(name)
     }
 
-    fn parse_ty(colons_before_params: bool) -> @ty {
+    fn parse_ty(colons_before_params: bool) -> @Ty {
         maybe_whole!(self, nt_ty);
 
         let lo = self.span.lo;
@@ -609,10 +609,10 @@ impl parser {
         }
     }
 
-    fn parse_capture_item_or(parse_arg_fn: fn(parser) -> arg_or_capture_item)
+    fn parse_capture_item_or(parse_arg_fn: fn(Parser) -> arg_or_capture_item)
         -> arg_or_capture_item {
 
-        fn parse_capture_item(p:parser, is_move: bool) -> capture_item {
+        fn parse_capture_item(p:Parser, is_move: bool) -> capture_item {
             let sp = mk_sp(p.span.lo, p.span.hi);
             let ident = p.parse_ident();
             @{id: p.get_id(), is_move: is_move, name: ident, span: sp}
@@ -728,7 +728,7 @@ impl parser {
         }
     }
 
-    fn lit_from_token(tok: token::token) -> lit_ {
+    fn lit_from_token(tok: token::Token) -> lit_ {
         match tok {
           token::LIT_INT(i, it) => lit_int(i, it),
           token::LIT_UINT(u, ut) => lit_uint(u, ut),
@@ -760,8 +760,8 @@ impl parser {
     }
 
     fn parse_path_without_tps_(
-        parse_ident: fn(parser) -> ident,
-        parse_last_ident: fn(parser) -> ident) -> @path {
+        parse_ident: fn(Parser) -> ident,
+        parse_last_ident: fn(Parser) -> ident) -> @path {
 
         maybe_whole!(self, nt_path);
         let lo = self.span.lo;
@@ -842,7 +842,7 @@ impl parser {
         }
     }
 
-    fn parse_field(sep: token::token) -> field {
+    fn parse_field(sep: token::Token) -> field {
         let lo = self.span.lo;
         let m = self.parse_mutability();
         let i = self.parse_ident();
@@ -1220,7 +1220,7 @@ impl parser {
         return e;
     }
 
-    fn parse_sep_and_zerok() -> (Option<token::token>, bool) {
+    fn parse_sep_and_zerok() -> (Option<token::Token>, bool) {
         if self.token == token::BINOP(token::STAR)
             || self.token == token::BINOP(token::PLUS) {
             let zerok = self.token == token::BINOP(token::STAR);
@@ -1243,7 +1243,7 @@ impl parser {
     fn parse_token_tree() -> token_tree {
         maybe_whole!(deref self, nt_tt);
 
-        fn parse_tt_tok(p: parser, delim_ok: bool) -> token_tree {
+        fn parse_tt_tok(p: Parser, delim_ok: bool) -> token_tree {
             match p.token {
               token::RPAREN | token::RBRACE | token::RBRACKET
               if !delim_ok => {
@@ -1310,8 +1310,8 @@ impl parser {
     // This goofy function is necessary to correctly match parens in matchers.
     // Otherwise, `$( ( )` would be a valid matcher, and `$( () )` would be
     // invalid. It's similar to common::parse_seq.
-    fn parse_matcher_subseq(name_idx: @mut uint, bra: token::token,
-                            ket: token::token) -> ~[matcher] {
+    fn parse_matcher_subseq(name_idx: @mut uint, bra: token::Token,
+                            ket: token::Token) -> ~[matcher] {
         let mut ret_val = ~[];
         let mut lparens = 0u;
 
@@ -2158,7 +2158,7 @@ impl parser {
     fn parse_stmt(+first_item_attrs: ~[attribute]) -> @stmt {
         maybe_whole!(self, nt_stmt);
 
-        fn check_expected_item(p: parser, current_attrs: ~[attribute]) {
+        fn check_expected_item(p: Parser, current_attrs: ~[attribute]) {
             // If we have attributes then we should have an item
             if vec::is_not_empty(current_attrs) {
                 p.fatal(~"expected item");
@@ -2221,7 +2221,7 @@ impl parser {
 
         maybe_whole!(pair_empty self, nt_block);
 
-        fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) ->
+        fn maybe_parse_inner_attrs_and_next(p: Parser, parse_attrs: bool) ->
             {inner: ~[attribute], next: ~[attribute]} {
             if parse_attrs {
                 p.parse_inner_attrs_and_next()
@@ -2386,7 +2386,7 @@ impl parser {
         } else { ~[] }
     }
 
-    fn parse_fn_decl(parse_arg_fn: fn(parser) -> arg_or_capture_item)
+    fn parse_fn_decl(parse_arg_fn: fn(Parser) -> arg_or_capture_item)
         -> (fn_decl, capture_clause) {
 
         let args_or_capture_items: ~[arg_or_capture_item] =
@@ -2420,11 +2420,11 @@ impl parser {
     }
 
     fn parse_fn_decl_with_self(parse_arg_fn:
-                                    fn(parser) -> arg_or_capture_item)
+                                    fn(Parser) -> arg_or_capture_item)
                             -> (self_ty, fn_decl, capture_clause) {
 
         fn maybe_parse_self_ty(cnstr: fn(+v: mutability) -> ast::self_ty_,
-                               p: parser) -> ast::self_ty_ {
+                               p: Parser) -> ast::self_ty_ {
             // We need to make sure it isn't a mode or a type
             if p.token_is_keyword(~"self", p.look_ahead(1)) ||
                 ((p.token_is_keyword(~"const", p.look_ahead(1)) ||
@@ -2604,7 +2604,7 @@ impl parser {
     // Parses four variants (with the region/type params always optional):
     //    impl<T> ~[T] : to_str { ... }
     fn parse_item_impl() -> item_info {
-        fn wrap_path(p: parser, pt: @path) -> @ty {
+        fn wrap_path(p: Parser, pt: @path) -> @Ty {
             @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span}
         }
 
@@ -2664,7 +2664,7 @@ impl parser {
           ref_id: self.get_id(), impl_id: self.get_id()}
     }
 
-    fn parse_trait_ref_list(ket: token::token) -> ~[@trait_ref] {
+    fn parse_trait_ref_list(ket: token::Token) -> ~[@trait_ref] {
         self.parse_seq_to_before_end(
             ket, seq_sep_trailing_disallowed(token::COMMA),
             |p| p.parse_trait_ref())
@@ -2756,7 +2756,7 @@ impl parser {
          None)
     }
 
-    fn token_is_pound_or_doc_comment(++tok: token::token) -> bool {
+    fn token_is_pound_or_doc_comment(++tok: token::Token) -> bool {
         match tok {
             token::POUND | token::DOC_COMMENT(_) => true,
             _ => false
@@ -2841,7 +2841,7 @@ impl parser {
         self.eat_keyword(~"static")
     }
 
-    fn parse_mod_items(term: token::token,
+    fn parse_mod_items(term: token::Token,
                        +first_item_attrs: ~[attribute]) -> _mod {
         // Shouldn't be any view items since we've already parsed an item attr
         let {attrs_remaining, view_items, items: starting_items} =
@@ -3222,7 +3222,7 @@ impl parser {
         }
     }
 
-    fn fn_expr_lookahead(tok: token::token) -> bool {
+    fn fn_expr_lookahead(tok: token::Token) -> bool {
         match tok {
           token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true,
           _ => false
@@ -3608,7 +3608,7 @@ impl parser {
         return self.fatal(~"expected crate directive");
     }
 
-    fn parse_crate_directives(term: token::token,
+    fn parse_crate_directives(term: token::Token,
                               first_outer_attr: ~[attribute]) ->
         ~[@crate_directive] {
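
Beyond the renames, the hunks above expose the Parser's core state: the
current token plus a small buffer that look_ahead() fills lazily from the
reader and bump() drains. The sketch below (not part of the patch) shows
that behaviour in today's Rust, using a VecDeque instead of the fixed
four-slot buffer_start/buffer_end ring, which is an optimization this
sketch does not reproduce.

    // Sketch only: bump() / look_ahead() over a buffered token stream.
    use std::collections::VecDeque;

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Token { Num(u32), Eof }

    struct Parser {
        source: Vec<Token>,      // stands in for the reader
        next: usize,
        token: Token,            // current token, like `self.token`
        buffer: VecDeque<Token>, // tokens already pulled from the reader
    }

    impl Parser {
        fn new(source: Vec<Token>) -> Parser {
            let mut p = Parser {
                source,
                next: 0,
                token: Token::Eof,
                buffer: VecDeque::new(),
            };
            p.token = p.pull();
            p
        }

        fn pull(&mut self) -> Token {
            let t = *self.source.get(self.next).unwrap_or(&Token::Eof);
            self.next += 1;
            t
        }

        // Like bump(): prefer buffered tokens, else ask the reader.
        fn bump(&mut self) {
            self.token = match self.buffer.pop_front() {
                Some(t) => t,
                None => self.pull(),
            };
        }

        // Like look_ahead(distance): fill the buffer lazily, then index it.
        fn look_ahead(&mut self, distance: usize) -> Token {
            while self.buffer.len() < distance {
                let t = self.pull();
                self.buffer.push_back(t);
            }
            if distance == 0 { self.token } else { self.buffer[distance - 1] }
        }
    }

    fn main() {
        let toks = vec![Token::Num(1), Token::Num(2), Token::Num(3)];
        let mut p = Parser::new(toks);
        assert_eq!(p.token, Token::Num(1));
        assert_eq!(p.look_ahead(1), Token::Num(2));
        assert_eq!(p.look_ahead(2), Token::Num(3));
        p.bump(); // consumes the buffered Num(2)
        assert_eq!(p.token, Token::Num(2));
    }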
 
diff --git a/src/libsyntax/parse/prec.rs b/src/libsyntax/parse/prec.rs
index 668301db620..3fd905cb8ec 100644
--- a/src/libsyntax/parse/prec.rs
+++ b/src/libsyntax/parse/prec.rs
@@ -3,7 +3,7 @@ export unop_prec;
 export token_to_binop;
 
 use token::*;
-use token::token;
+use token::Token;
 use ast::*;
 
 /// Unary operators have higher precedence than binary
@@ -19,7 +19,7 @@ const as_prec: uint = 11u;
  * Maps a token to a record specifying the corresponding binary
  * operator and its precedence
  */
-fn token_to_binop(tok: token) -> Option<ast::binop> {
+fn token_to_binop(tok: Token) -> Option<ast::binop> {
   match tok {
       BINOP(STAR)    => Some(mul),
       BINOP(SLASH)   => Some(div),
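
token_to_binop is a pure table: a Token either names a binary operator
(paired with a precedence the expression parser consults) or it does not.
A tiny modern-Rust sketch of that mapping follows (not part of the patch);
the operator set and precedence values are illustrative, not rustc's table.

    // Sketch only: map a token to its binary operator and precedence.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Token { Star, Plus, Ident }

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum BinOp { Mul, Add }

    fn token_to_binop(tok: Token) -> Option<(BinOp, u8)> {
        match tok {
            Token::Star => Some((BinOp::Mul, 10)), // binds tighter than Add
            Token::Plus => Some((BinOp::Add, 9)),
            Token::Ident => None, // not a binary operator
        }
    }

    fn main() {
        assert_eq!(token_to_binop(Token::Star), Some((BinOp::Mul, 10)));
        assert_eq!(token_to_binop(Token::Plus), Some((BinOp::Add, 9)));
        assert_eq!(token_to_binop(Token::Ident), None);
    }
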
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index a7d439b8ce6..5151fd1bac8 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -1,5 +1,5 @@
 use util::interner;
-use util::interner::interner;
+use util::interner::Interner;
 use std::map::HashMap;
 
 #[auto_serialize]
@@ -19,7 +19,7 @@ enum binop {
 
 #[auto_serialize]
 #[auto_deserialize]
-enum token {
+enum Token {
     /* Expression-operator symbols. */
     EQ,
     LT,
@@ -84,7 +84,7 @@ enum nonterminal {
     nt_stmt(@ast::stmt),
     nt_pat( @ast::pat),
     nt_expr(@ast::expr),
-    nt_ty(  @ast::ty),
+    nt_ty(  @ast::Ty),
     nt_ident(ast::ident, bool),
     nt_path(@ast::path),
     nt_tt(  @ast::token_tree), //needs @ed to break a circularity
@@ -106,7 +106,7 @@ fn binop_to_str(o: binop) -> ~str {
     }
 }
 
-fn to_str(in: @ident_interner, t: token) -> ~str {
+fn to_str(in: @ident_interner, t: Token) -> ~str {
     match t {
       EQ => ~"=",
       LT => ~"<",
@@ -192,7 +192,7 @@ fn to_str(in: @ident_interner, t: token) -> ~str {
     }
 }
 
-pure fn can_begin_expr(t: token) -> bool {
+pure fn can_begin_expr(t: Token) -> bool {
     match t {
       LPAREN => true,
       LBRACE => true,
@@ -223,7 +223,7 @@ pure fn can_begin_expr(t: token) -> bool {
 }
 
 /// what's the opposite delimiter?
-fn flip_delimiter(t: token::token) -> token::token {
+fn flip_delimiter(t: token::Token) -> token::Token {
     match t {
       token::LPAREN => token::RPAREN,
       token::LBRACE => token::RBRACE,
@@ -237,7 +237,7 @@ fn flip_delimiter(t: token::token) -> token::token {
 
 
 
-fn is_lit(t: token) -> bool {
+fn is_lit(t: Token) -> bool {
     match t {
       LIT_INT(_, _) => true,
       LIT_UINT(_, _) => true,
@@ -248,22 +248,22 @@ fn is_lit(t: token) -> bool {
     }
 }
 
-pure fn is_ident(t: token) -> bool {
+pure fn is_ident(t: Token) -> bool {
     match t { IDENT(_, _) => true, _ => false }
 }
 
-pure fn is_ident_or_path(t: token) -> bool {
+pure fn is_ident_or_path(t: Token) -> bool {
     match t {
       IDENT(_, _) | INTERPOLATED(nt_path(*)) => true,
       _ => false
     }
 }
 
-pure fn is_plain_ident(t: token) -> bool {
+pure fn is_plain_ident(t: Token) -> bool {
     match t { IDENT(_, false) => true, _ => false }
 }
 
-pure fn is_bar(t: token) -> bool {
+pure fn is_bar(t: Token) -> bool {
     match t { BINOP(OR) | OROR => true, _ => false }
 }
 
@@ -314,7 +314,7 @@ mod special_idents {
 }
 
 struct ident_interner {
-    priv interner: util::interner::interner<@~str>,
+    priv interner: util::interner::Interner<@~str>,
 }
 
 impl ident_interner {
@@ -457,8 +457,8 @@ impl binop : cmp::Eq {
     pure fn ne(other: &binop) -> bool { !self.eq(other) }
 }
 
-impl token : cmp::Eq {
-    pure fn eq(other: &token) -> bool {
+impl Token : cmp::Eq {
+    pure fn eq(other: &Token) -> bool {
         match self {
             EQ => {
                 match (*other) {
@@ -720,7 +720,7 @@ impl token : cmp::Eq {
             }
         }
     }
-    pure fn ne(other: &token) -> bool { !self.eq(other) }
+    pure fn ne(other: &Token) -> bool { !self.eq(other) }
 }
 
 // Local Variables:
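
Token is a plain enum plus free helper functions: to_str for diagnostics,
flip_delimiter for matching brackets, and the is_* predicates the parser
branches on. The sketch below (not part of the patch) shows that shape in
current Rust with a small assumed subset of the variants.

    // Sketch only: a trimmed Token enum and its helpers.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Token {
        Eq,
        Lt,
        LParen,
        RParen,
        LBrace,
        RBrace,
        LitInt(i64),
        Eof,
    }

    fn to_str(t: Token) -> String {
        match t {
            Token::Eq => "=".to_string(),
            Token::Lt => "<".to_string(),
            Token::LParen => "(".to_string(),
            Token::RParen => ")".to_string(),
            Token::LBrace => "{".to_string(),
            Token::RBrace => "}".to_string(),
            Token::LitInt(n) => n.to_string(),
            Token::Eof => "<eof>".to_string(),
        }
    }

    /// What's the opposite delimiter?
    fn flip_delimiter(t: Token) -> Token {
        match t {
            Token::LParen => Token::RParen,
            Token::RParen => Token::LParen,
            Token::LBrace => Token::RBrace,
            Token::RBrace => Token::LBrace,
            other => other, // non-delimiters pass through in this sketch
        }
    }

    fn is_lit(t: Token) -> bool {
        matches!(t, Token::LitInt(_))
    }

    fn main() {
        assert_eq!(to_str(Token::LitInt(42)), "42");
        assert_eq!(to_str(Token::Lt), "<");
        assert_eq!(to_str(Token::Eof), "<eof>");
        assert_eq!(flip_delimiter(Token::LParen), Token::RParen);
        assert!(is_lit(Token::LitInt(0)));
        assert!(!is_lit(Token::Eq));
    }
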
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index b98014f421b..5e37f7e18ec 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -1,5 +1,5 @@
 use parse::{comments, lexer, token};
-use codemap::codemap;
+use codemap::CodeMap;
 use pp::{break_offset, word, printer, space, zerobreak, hardbreak, breaks};
 use pp::{consistent, inconsistent, eof};
 use ast::{required, provided};
@@ -24,7 +24,7 @@ fn no_ann() -> pp_ann {
 
 type ps =
     @{s: pp::printer,
-      cm: Option<codemap>,
+      cm: Option<CodeMap>,
       intr: @token::ident_interner,
       comments: Option<~[comments::cmnt]>,
       literals: Option<~[comments::lit]>,
@@ -45,7 +45,7 @@ fn end(s: ps) {
 
 fn rust_printer(writer: io::Writer, intr: @ident_interner) -> ps {
     return @{s: pp::mk_printer(writer, default_columns),
-             cm: None::<codemap>,
+             cm: None::<CodeMap>,
              intr: intr,
              comments: None::<~[comments::cmnt]>,
              literals: None::<~[comments::lit]>,
@@ -63,7 +63,7 @@ const default_columns: uint = 78u;
 // Requires you to pass an input filename and reader so that
 // it can scan the input text for comments and literals to
 // copy forward.
-fn print_crate(cm: codemap, intr: @ident_interner,
+fn print_crate(cm: CodeMap, intr: @ident_interner,
                span_diagnostic: diagnostic::span_handler,
                crate: @ast::crate, filename: ~str, in: io::Reader,
                out: io::Writer, ann: pp_ann, is_expanded: bool) {
@@ -91,7 +91,7 @@ fn print_crate_(s: ps, &&crate: @ast::crate) {
     eof(s.s);
 }
 
-fn ty_to_str(ty: @ast::ty, intr: @ident_interner) -> ~str {
+fn ty_to_str(ty: @ast::Ty, intr: @ident_interner) -> ~str {
     to_str(ty, print_type, intr)
 }
 
@@ -348,11 +348,11 @@ fn print_region(s: ps, region: @ast::region, sep: ~str) {
     word(s.s, sep);
 }
 
-fn print_type(s: ps, &&ty: @ast::ty) {
+fn print_type(s: ps, &&ty: @ast::Ty) {
     print_type_ex(s, ty, false);
 }
 
-fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) {
+fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) {
     maybe_print_comment(s, ty.span.lo);
     ibox(s, 0u);
     match ty.node {
diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs
index 5d991bb3551..f564589cbe0 100644
--- a/src/libsyntax/util/interner.rs
+++ b/src/libsyntax/util/interner.rs
@@ -12,14 +12,14 @@ type hash_interner<T: Const> =
     {map: HashMap<T, uint>,
      vect: DVec<T>};
 
-fn mk<T:Eq IterBytes Hash Const Copy>() -> interner<T> {
+fn mk<T:Eq IterBytes Hash Const Copy>() -> Interner<T> {
     let m = map::HashMap::<T, uint>();
     let hi: hash_interner<T> =
         {map: m, vect: DVec()};
-    move ((move hi) as interner::<T>)
+    move ((move hi) as Interner::<T>)
 }
 
-fn mk_prefill<T:Eq IterBytes Hash Const Copy>(init: ~[T]) -> interner<T> {
+fn mk_prefill<T:Eq IterBytes Hash Const Copy>(init: ~[T]) -> Interner<T> {
     let rv = mk();
     for init.each() |v| { rv.intern(*v); }
     return rv;
@@ -27,14 +27,14 @@ fn mk_prefill<T:Eq IterBytes Hash Const Copy>(init: ~[T]) -> interner<T> {
 
 
 /* when traits can extend traits, we should extend index<uint,T> to get [] */
-trait interner<T:Eq IterBytes Hash Const Copy> {
+trait Interner<T:Eq IterBytes Hash Const Copy> {
     fn intern(T) -> uint;
     fn gensym(T) -> uint;
     pure fn get(uint) -> T;
     fn len() -> uint;
 }
 
-impl <T:Eq IterBytes Hash Const Copy> hash_interner<T>: interner<T> {
+impl <T:Eq IterBytes Hash Const Copy> hash_interner<T>: Interner<T> {
     fn intern(val: T) -> uint {
         match self.map.find(val) {
           Some(idx) => return idx,
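
The Interner pairs a hash map (value to index) with a vector (index to
value): intern() deduplicates, gensym() deliberately skips the map so equal
values still get distinct symbols, and get() resolves an index. A
modern-Rust sketch specialized to strings follows (not part of the patch).

    // Sketch only: map-plus-vector interning with intern/gensym/get/len.
    use std::collections::HashMap;

    struct Interner {
        map: HashMap<String, usize>,
        vect: Vec<String>,
    }

    impl Interner {
        fn new() -> Interner {
            Interner { map: HashMap::new(), vect: Vec::new() }
        }

        fn intern(&mut self, val: &str) -> usize {
            if let Some(&idx) = self.map.get(val) {
                return idx; // already interned
            }
            let idx = self.vect.len();
            self.map.insert(val.to_string(), idx);
            self.vect.push(val.to_string());
            idx
        }

        // Like gensym: never consults the map, so equal strings get new indices.
        fn gensym(&mut self, val: &str) -> usize {
            let idx = self.vect.len();
            self.vect.push(val.to_string());
            idx
        }

        fn get(&self, idx: usize) -> &str {
            &self.vect[idx]
        }

        fn len(&self) -> usize {
            self.vect.len()
        }
    }

    fn main() {
        let mut i = Interner::new();
        let a = i.intern("self");
        let b = i.intern("self");
        let c = i.gensym("self");
        assert_eq!(a, b);       // interning is idempotent
        assert_ne!(a, c);       // gensym always mints a fresh symbol
        assert_eq!(i.get(c), "self");
        assert_eq!(i.len(), 2); // once via intern, once via gensym
    }
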
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index b402f9727dc..32fcbdfc758 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -55,7 +55,7 @@ type visitor<E> =
       visit_decl: fn@(@decl, E, vt<E>),
       visit_expr: fn@(@expr, E, vt<E>),
       visit_expr_post: fn@(@expr, E, vt<E>),
-      visit_ty: fn@(@ty, E, vt<E>),
+      visit_ty: fn@(@Ty, E, vt<E>),
       visit_ty_params: fn@(~[ty_param], E, vt<E>),
       visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id, E, vt<E>),
       visit_ty_method: fn@(ty_method, E, vt<E>),
@@ -187,9 +187,9 @@ fn visit_enum_def<E>(enum_definition: ast::enum_def, tps: ~[ast::ty_param],
     }
 }
 
-fn skip_ty<E>(_t: @ty, _e: E, _v: vt<E>) {}
+fn skip_ty<E>(_t: @Ty, _e: E, _v: vt<E>) {}
 
-fn visit_ty<E>(t: @ty, e: E, v: vt<E>) {
+fn visit_ty<E>(t: @Ty, e: E, v: vt<E>) {
     match t.node {
       ty_box(mt) | ty_uniq(mt) |
       ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => {
@@ -490,7 +490,7 @@ type simple_visitor =
       visit_decl: fn@(@decl),
       visit_expr: fn@(@expr),
       visit_expr_post: fn@(@expr),
-      visit_ty: fn@(@ty),
+      visit_ty: fn@(@Ty),
       visit_ty_params: fn@(~[ty_param]),
       visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id),
       visit_ty_method: fn@(ty_method),
@@ -499,7 +499,7 @@ type simple_visitor =
       visit_struct_field: fn@(@struct_field),
       visit_struct_method: fn@(@method)};
 
-fn simple_ignore_ty(_t: @ty) {}
+fn simple_ignore_ty(_t: @Ty) {}
 
 fn default_simple_visitor() -> simple_visitor {
     return @{visit_mod: fn@(_m: _mod, _sp: span, _id: node_id) { },
@@ -577,7 +577,7 @@ fn mk_simple_visitor(v: simple_visitor) -> vt<()> {
     fn v_expr_post(f: fn@(@expr), ex: @expr, &&_e: (), _v: vt<()>) {
         f(ex);
     }
-    fn v_ty(f: fn@(@ty), ty: @ty, &&e: (), v: vt<()>) {
+    fn v_ty(f: fn@(@Ty), ty: @Ty, &&e: (), v: vt<()>) {
         f(ty);
         visit_ty(ty, e, v);
     }
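
The visitor is a table of callbacks with a default "keep walking" behaviour
(visit_ty) and a "do nothing" escape hatch (skip_ty, simple_ignore_ty). The
sketch below (not part of the patch) expresses the same idea as a trait
with an overridable method in today's Rust; the tiny Ty tree is an
assumption for illustration.

    // Sketch only: a walk function plus an overridable visit_ty hook.
    enum Ty {
        Nil,
        Box(Box<Ty>), // like ty_box / ty_uniq: one nested type
        Tup(Vec<Ty>), // like ty_tup: several nested types
    }

    trait Visitor {
        // Default behaviour is just to keep walking, like visit_ty above.
        fn visit_ty(&mut self, t: &Ty) {
            walk_ty(self, t);
        }
    }

    fn walk_ty<V: Visitor + ?Sized>(v: &mut V, t: &Ty) {
        match t {
            Ty::Nil => {}
            Ty::Box(inner) => v.visit_ty(inner),
            Ty::Tup(elems) => {
                for e in elems {
                    v.visit_ty(e);
                }
            }
        }
    }

    // A visitor that overrides visit_ty to count nodes, then resumes the walk.
    struct Counter { tys: usize }

    impl Visitor for Counter {
        fn visit_ty(&mut self, t: &Ty) {
            self.tys += 1;
            walk_ty(self, t);
        }
    }

    fn main() {
        let t = Ty::Tup(vec![Ty::Nil, Ty::Box(Box::new(Ty::Nil))]);
        let mut c = Counter { tys: 0 };
        c.visit_ty(&t);
        assert_eq!(c.tys, 4); // Tup + Nil + Box + inner Nil
    }
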
diff --git a/src/rustc/back/link.rs b/src/rustc/back/link.rs
index 67edf6a32ba..61fd68c193a 100644
--- a/src/rustc/back/link.rs
+++ b/src/rustc/back/link.rs
@@ -1,6 +1,6 @@
 use libc::{c_int, c_uint, c_char};
 use driver::session;
-use session::session;
+use session::Session;
 use lib::llvm::llvm;
 use syntax::attr;
 use middle::ty;
@@ -33,14 +33,14 @@ impl output_type : cmp::Eq {
     pure fn ne(other: &output_type) -> bool { !self.eq(other) }
 }
 
-fn llvm_err(sess: session, msg: ~str) -> ! unsafe {
+fn llvm_err(sess: Session, msg: ~str) -> ! unsafe {
     let cstr = llvm::LLVMRustGetLastError();
     if cstr == ptr::null() {
         sess.fatal(msg);
     } else { sess.fatal(msg + ~": " + str::raw::from_c_str(cstr)); }
 }
 
-fn WriteOutputFile(sess:session,
+fn WriteOutputFile(sess: Session,
         PM: lib::llvm::PassManagerRef, M: ModuleRef,
         Triple: *c_char,
         // FIXME: When #2334 is fixed, change
@@ -69,7 +69,7 @@ mod jit {
         env: *(),
     }
 
-    fn exec(sess: session,
+    fn exec(sess: Session,
             pm: PassManagerRef,
             m: ModuleRef,
             opt: c_int,
@@ -131,7 +131,7 @@ mod write {
         return false;
     }
 
-    fn run_passes(sess: session, llmod: ModuleRef, output: &Path) {
+    fn run_passes(sess: Session, llmod: ModuleRef, output: &Path) {
         let opts = sess.opts;
         if sess.time_llvm_passes() { llvm::LLVMRustEnableTimePasses(); }
         let mut pm = mk_pass_manager();
@@ -384,7 +384,7 @@ mod write {
  *
  */
 
-fn build_link_meta(sess: session, c: ast::crate, output: &Path,
+fn build_link_meta(sess: Session, c: ast::crate, output: &Path,
                    symbol_hasher: &hash::State) -> link_meta {
 
     type provided_metas =
@@ -392,7 +392,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path,
          vers: Option<~str>,
          cmh_items: ~[@ast::meta_item]};
 
-    fn provided_link_metas(sess: session, c: ast::crate) ->
+    fn provided_link_metas(sess: Session, c: ast::crate) ->
        provided_metas {
         let mut name: Option<~str> = None;
         let mut vers: Option<~str> = None;
@@ -454,13 +454,13 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path,
         return truncated_hash_result(symbol_hasher);
     }
 
-    fn warn_missing(sess: session, name: ~str, default: ~str) {
+    fn warn_missing(sess: Session, name: ~str, default: ~str) {
         if !sess.building_library { return; }
         sess.warn(fmt!("missing crate link meta `%s`, using `%s` as default",
                        name, default));
     }
 
-    fn crate_meta_name(sess: session, _crate: ast::crate,
+    fn crate_meta_name(sess: Session, _crate: ast::crate,
                        output: &Path, metas: provided_metas) -> ~str {
         return match metas.name {
               Some(v) => v,
@@ -477,7 +477,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path,
             };
     }
 
-    fn crate_meta_vers(sess: session, _crate: ast::crate,
+    fn crate_meta_vers(sess: Session, _crate: ast::crate,
                        metas: provided_metas) -> ~str {
         return match metas.vers {
               Some(v) => v,
@@ -569,7 +569,7 @@ fn sanitize(s: ~str) -> ~str {
     return result;
 }
 
-fn mangle(sess: session, ss: path) -> ~str {
+fn mangle(sess: Session, ss: path) -> ~str {
     // Follow C++ namespace-mangling style
 
     let mut n = ~"_ZN"; // Begin name-sequence.
@@ -584,7 +584,7 @@ fn mangle(sess: session, ss: path) -> ~str {
     n
 }
 
-fn exported_name(sess: session, path: path, hash: ~str, vers: ~str) -> ~str {
+fn exported_name(sess: Session, path: path, hash: ~str, vers: ~str) -> ~str {
     return mangle(sess,
                   vec::append_one(
                       vec::append_one(path, path_name(sess.ident_of(hash))),
@@ -623,7 +623,7 @@ fn mangle_internal_name_by_seq(ccx: @crate_ctxt, flav: ~str) -> ~str {
 
 // If the user wants an exe generated we need to invoke
 // cc to link the object file with some libs
-fn link_binary(sess: session,
+fn link_binary(sess: Session,
                obj_filename: &Path,
                out_filename: &Path,
                lm: link_meta) {
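
mangle() follows the C++ namespace-mangling style: an _ZN prefix and each
path segment emitted length-prefixed. The rough sketch below is not part of
the patch; the closing marker, the hash/version segments added by
exported_name, and any character sanitization are simplifying assumptions.

    // Sketch only: length-prefixed, _ZN-style symbol mangling.
    fn mangle(path: &[&str]) -> String {
        let mut n = String::from("_ZN"); // begin name-sequence
        for seg in path {
            // each segment is emitted as <len><bytes>
            n.push_str(&seg.len().to_string());
            n.push_str(seg);
        }
        n.push('E'); // end name-sequence (assumed terminator)
        n
    }

    fn main() {
        assert_eq!(mangle(&["core", "str", "len"]), "_ZN4core3str3lenE");
    }
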
diff --git a/src/rustc/back/rpath.rs b/src/rustc/back/rpath.rs
index 8aa7caefc7a..8038d7bb6dd 100644
--- a/src/rustc/back/rpath.rs
+++ b/src/rustc/back/rpath.rs
@@ -13,7 +13,7 @@ pure fn not_win32(os: session::os) -> bool {
   }
 }
 
-fn get_rpath_flags(sess: session::session, out_filename: &Path) -> ~[~str] {
+fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> ~[~str] {
     let os = sess.targ_cfg.os;
 
     // No rpath on windows
@@ -35,7 +35,7 @@ fn get_rpath_flags(sess: session::session, out_filename: &Path) -> ~[~str] {
     rpaths_to_flags(rpaths)
 }
 
-fn get_sysroot_absolute_rt_lib(sess: session::session) -> Path {
+fn get_sysroot_absolute_rt_lib(sess: session::Session) -> Path {
     let r = filesearch::relative_target_lib_path(sess.opts.target_triple);
     sess.filesearch.sysroot().push_rel(&r).push(os::dll_filename("rustrt"))
 }
diff --git a/src/rustc/driver/driver.rs b/src/rustc/driver/driver.rs
index e389f3a4bdf..5da8f5475ed 100644
--- a/src/rustc/driver/driver.rs
+++ b/src/rustc/driver/driver.rs
@@ -1,6 +1,6 @@
 // -*- rust -*-
 use metadata::{creader, cstore, filesearch};
-use session::{session, session_, OptLevel, No, Less, Default, Aggressive};
+use session::{Session, Session_, OptLevel, No, Less, Default, Aggressive};
 use syntax::parse;
 use syntax::{ast, codemap};
 use syntax::attr;
@@ -32,7 +32,7 @@ fn source_name(input: input) -> ~str {
     }
 }
 
-fn default_configuration(sess: session, argv0: ~str, input: input) ->
+fn default_configuration(sess: Session, argv0: ~str, input: input) ->
    ast::crate_cfg {
     let libc = match sess.targ_cfg.os {
       session::os_win32 => ~"msvcrt.dll",
@@ -70,7 +70,7 @@ fn append_configuration(cfg: ast::crate_cfg, name: ~str) -> ast::crate_cfg {
     }
 }
 
-fn build_configuration(sess: session, argv0: ~str, input: input) ->
+fn build_configuration(sess: Session, argv0: ~str, input: input) ->
    ast::crate_cfg {
     // Combine the configuration requested by the session (command line) with
     // some default and generated configuration items
@@ -106,7 +106,7 @@ enum input {
     str_input(~str)
 }
 
-fn parse_input(sess: session, cfg: ast::crate_cfg, input: input)
+fn parse_input(sess: Session, cfg: ast::crate_cfg, input: input)
     -> @ast::crate {
     match input {
       file_input(file) => {
@@ -145,7 +145,7 @@ impl compile_upto : cmp::Eq {
     pure fn ne(other: &compile_upto) -> bool { !self.eq(other) }
 }
 
-fn compile_upto(sess: session, cfg: ast::crate_cfg,
+fn compile_upto(sess: Session, cfg: ast::crate_cfg,
                 input: input, upto: compile_upto,
                 outputs: Option<output_filenames>)
     -> {crate: @ast::crate, tcx: Option<ty::ctxt>} {
@@ -277,7 +277,7 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg,
     return {crate: crate, tcx: Some(ty_cx)};
 }
 
-fn compile_input(sess: session, cfg: ast::crate_cfg, input: input,
+fn compile_input(sess: Session, cfg: ast::crate_cfg, input: input,
                  outdir: &Option<Path>, output: &Option<Path>) {
 
     let upto = if sess.opts.parse_only { cu_parse }
@@ -287,7 +287,7 @@ fn compile_input(sess: session, cfg: ast::crate_cfg, input: input,
     compile_upto(sess, cfg, input, upto, Some(outputs));
 }
 
-fn pretty_print_input(sess: session, cfg: ast::crate_cfg, input: input,
+fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: input,
                       ppm: pp_mode) {
     fn ann_paren_for_expr(node: pprust::ann_node) {
         match node {
@@ -571,7 +571,7 @@ fn build_session_options(binary: ~str,
 }
 
 fn build_session(sopts: @session::options,
-                 demitter: diagnostic::emitter) -> session {
+                 demitter: diagnostic::emitter) -> Session {
     let codemap = codemap::new_codemap();
     let diagnostic_handler =
         diagnostic::mk_handler(Some(demitter));
@@ -581,11 +581,10 @@ fn build_session(sopts: @session::options,
 }
 
 fn build_session_(sopts: @session::options,
-                  cm: codemap::codemap,
+                  cm: codemap::CodeMap,
                   demitter: diagnostic::emitter,
                   span_diagnostic_handler: diagnostic::span_handler)
-               -> session {
-
+               -> Session {
     let target_cfg = build_target_config(sopts, demitter);
     let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler,
                                                     cm);
@@ -595,7 +594,7 @@ fn build_session_(sopts: @session::options,
         sopts.target_triple,
         sopts.addl_lib_search_paths);
     let lint_settings = lint::mk_lint_settings();
-    session_(@{targ_cfg: target_cfg,
+    Session_(@{targ_cfg: target_cfg,
                opts: sopts,
                cstore: cstore,
                parse_sess: p_s,
@@ -609,7 +608,7 @@ fn build_session_(sopts: @session::options,
                lint_settings: lint_settings})
 }
 
-fn parse_pretty(sess: session, &&name: ~str) -> pp_mode {
+fn parse_pretty(sess: Session, &&name: ~str) -> pp_mode {
     match name {
       ~"normal" => ppm_normal,
       ~"expanded" => ppm_expanded,
@@ -652,7 +651,7 @@ type output_filenames = @{out_filename:Path, obj_filename:Path};
 fn build_output_filenames(input: input,
                           odir: &Option<Path>,
                           ofile: &Option<Path>,
-                          sess: session)
+                          sess: Session)
         -> output_filenames {
     let obj_path;
     let out_path;
@@ -728,7 +727,7 @@ fn early_error(emitter: diagnostic::emitter, msg: ~str) -> ! {
     fail;
 }
 
-fn list_metadata(sess: session, path: &Path, out: io::Writer) {
+fn list_metadata(sess: Session, path: &Path, out: io::Writer) {
     metadata::loader::list_file_metadata(
         sess.parse_sess.interner,
         session::sess_os_to_meta_os(sess.targ_cfg.os), path, out);
diff --git a/src/rustc/driver/rustc.rs b/src/rustc/driver/rustc.rs
index 5833723ec10..59d4e0dfdb8 100644
--- a/src/rustc/driver/rustc.rs
+++ b/src/rustc/driver/rustc.rs
@@ -235,7 +235,7 @@ fn monitor(+f: fn~(diagnostic::emitter)) {
 
         // The 'diagnostics emitter'. Every error, warning, etc. should
         // go through this function.
-        let demitter = fn@(cmsp: Option<(codemap::codemap, codemap::span)>,
+        let demitter = fn@(cmsp: Option<(codemap::CodeMap, codemap::span)>,
                            msg: &str, lvl: diagnostic::level) {
             if lvl == diagnostic::fatal {
                 comm::send(ch, fatal);
diff --git a/src/rustc/driver/session.rs b/src/rustc/driver/session.rs
index 550656c23df..ed73bcb6d72 100644
--- a/src/rustc/driver/session.rs
+++ b/src/rustc/driver/session.rs
@@ -127,24 +127,24 @@ type options =
 
 type crate_metadata = {name: ~str, data: ~[u8]};
 
-type session_ = {targ_cfg: @config,
+type Session_ = {targ_cfg: @config,
                  opts: @options,
-                 cstore: metadata::cstore::cstore,
+                 cstore: metadata::cstore::CStore,
                  parse_sess: parse_sess,
-                 codemap: codemap::codemap,
+                 codemap: codemap::CodeMap,
                  // For a library crate, this is always none
                  mut main_fn: Option<(node_id, codemap::span)>,
                  span_diagnostic: diagnostic::span_handler,
-                 filesearch: filesearch::filesearch,
+                 filesearch: filesearch::FileSearch,
                  mut building_library: bool,
                  working_dir: Path,
                  lint_settings: lint::lint_settings};
 
-enum session {
-    session_(@session_)
+enum Session {
+    Session_(@Session_)
 }
 
-impl session {
+impl Session {
     fn span_fatal(sp: span, msg: ~str) -> ! {
         self.span_diagnostic.span_fatal(sp, msg)
     }
@@ -270,7 +270,7 @@ fn basic_options() -> @options {
 }
 
 // Seems out of place, but it uses session, so I'm putting it here
-fn expect<T: Copy>(sess: session, opt: Option<T>, msg: fn() -> ~str) -> T {
+fn expect<T: Copy>(sess: Session, opt: Option<T>, msg: fn() -> ~str) -> T {
     diagnostic::expect(sess.diagnostic(), opt, msg)
 }
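
Session is a thin handle (formerly the session_ wrapper around an @-record)
that every compiler phase passes around cheaply while sharing one blob of
state, with small delegating methods such as warn and span_fatal. The
sketch below (not part of the patch) shows that layout using Rc in modern
Rust; the fields are stand-ins for the real record.

    // Sketch only: a cloneable handle over shared session state.
    use std::rc::Rc;

    struct SessionData {
        building_library: bool,
        target_triple: String,
    }

    #[derive(Clone)]
    struct Session {
        data: Rc<SessionData>, // every copy of the handle shares this
    }

    impl Session {
        fn new(building_library: bool, target_triple: &str) -> Session {
            Session {
                data: Rc::new(SessionData {
                    building_library,
                    target_triple: target_triple.to_string(),
                }),
            }
        }

        // Analogue of the delegating methods on `impl Session` (warn, fatal, ...).
        fn warn(&self, msg: &str) {
            eprintln!("warning ({}): {}", self.data.target_triple, msg);
        }

        fn building_library(&self) -> bool {
            self.data.building_library
        }
    }

    fn main() {
        let sess = Session::new(true, "x86_64-unknown-linux-gnu");
        let alias = sess.clone(); // both handles see the same state
        assert!(alias.building_library());
        sess.warn("missing crate link meta `name`");
    }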
 
diff --git a/src/rustc/front/core_inject.rs b/src/rustc/front/core_inject.rs
index f198a2ca79d..e9be56e7d48 100644
--- a/src/rustc/front/core_inject.rs
+++ b/src/rustc/front/core_inject.rs
@@ -1,4 +1,4 @@
-use driver::session::session;
+use driver::session::Session;
 use syntax::codemap;
 use syntax::ast;
 use syntax::ast_util::*;
@@ -6,7 +6,7 @@ use syntax::attr;
 
 export maybe_inject_libcore_ref;
 
-fn maybe_inject_libcore_ref(sess: session,
+fn maybe_inject_libcore_ref(sess: Session,
                             crate: @ast::crate) -> @ast::crate {
     if use_core(crate) {
         inject_libcore_ref(sess, crate)
@@ -19,7 +19,7 @@ fn use_core(crate: @ast::crate) -> bool {
     !attr::attrs_contains_name(crate.node.attrs, ~"no_core")
 }
 
-fn inject_libcore_ref(sess: session,
+fn inject_libcore_ref(sess: Session,
                       crate: @ast::crate) -> @ast::crate {
 
     fn spanned<T: Copy>(x: T) -> @ast::spanned<T> {
diff --git a/src/rustc/front/intrinsic_inject.rs b/src/rustc/front/intrinsic_inject.rs
index 8fd885e8f8b..ac74bac3f2f 100644
--- a/src/rustc/front/intrinsic_inject.rs
+++ b/src/rustc/front/intrinsic_inject.rs
@@ -1,10 +1,10 @@
-use driver::session::session;
+use driver::session::Session;
 use syntax::parse;
 use syntax::ast;
 
 export inject_intrinsic;
 
-fn inject_intrinsic(sess: session,
+fn inject_intrinsic(sess: Session,
                     crate: @ast::crate) -> @ast::crate {
 
     let intrinsic_module = @include_str!("intrinsic.rs");
diff --git a/src/rustc/front/test.rs b/src/rustc/front/test.rs
index 1a6cc6dd895..f0c9de4f2a2 100644
--- a/src/rustc/front/test.rs
+++ b/src/rustc/front/test.rs
@@ -7,7 +7,7 @@ use syntax::fold;
 use syntax::print::pprust;
 use syntax::codemap::span;
 use driver::session;
-use session::session;
+use session::Session;
 use syntax::attr;
 use dvec::DVec;
 
@@ -19,14 +19,14 @@ type test = {span: span, path: ~[ast::ident],
              ignore: bool, should_fail: bool};
 
 type test_ctxt =
-    @{sess: session::session,
+    @{sess: session::Session,
       crate: @ast::crate,
       mut path: ~[ast::ident],
       testfns: DVec<test>};
 
 // Traverse the crate, collecting all the test functions, eliding any
 // existing main functions, and synthesizing a main test harness
-fn modify_for_testing(sess: session::session,
+fn modify_for_testing(sess: session::Session,
                       crate: @ast::crate) -> @ast::crate {
 
     if sess.opts.test {
@@ -36,7 +36,7 @@ fn modify_for_testing(sess: session::session,
     }
 }
 
-fn generate_test_harness(sess: session::session,
+fn generate_test_harness(sess: session::Session,
                          crate: @ast::crate) -> @ast::crate {
     let cx: test_ctxt =
         @{sess: sess,
@@ -261,13 +261,13 @@ fn mk_path(cx: test_ctxt, path: ~[ast::ident]) -> ~[ast::ident] {
     else { vec::append(~[cx.sess.ident_of(~"std")], path) }
 }
 
-// The ast::ty of ~[std::test::test_desc]
-fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
+// The ast::Ty of ~[std::test::test_desc]
+fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::Ty {
     let test_desc_ty_path =
         path_node(mk_path(cx, ~[cx.sess.ident_of(~"test"),
                                 cx.sess.ident_of(~"TestDesc")]));
 
-    let test_desc_ty: ast::ty =
+    let test_desc_ty: ast::Ty =
         {id: cx.sess.next_node_id(),
          node: ast::ty_path(test_desc_ty_path, cx.sess.next_node_id()),
          span: dummy_sp()};
diff --git a/src/rustc/metadata/creader.rs b/src/rustc/metadata/creader.rs
index 3ed56a1953e..3080426e531 100644
--- a/src/rustc/metadata/creader.rs
+++ b/src/rustc/metadata/creader.rs
@@ -7,7 +7,7 @@ use syntax::visit;
 use syntax::codemap::span;
 use std::map::HashMap;
 use syntax::print::pprust;
-use filesearch::filesearch;
+use filesearch::FileSearch;
 use common::*;
 use dvec::DVec;
 use syntax::parse::token::ident_interner;
@@ -17,7 +17,7 @@ export read_crates;
 // Traverses an AST, reading all the information about use'd crates and extern
 // libraries necessary for later resolving, typechecking, linking, etc.
 fn read_crates(diag: span_handler, crate: ast::crate,
-               cstore: cstore::cstore, filesearch: filesearch,
+               cstore: cstore::CStore, filesearch: FileSearch,
                os: loader::os, static: bool, intr: @ident_interner) {
     let e = @{diag: diag,
               filesearch: filesearch,
@@ -88,8 +88,8 @@ fn warn_if_multiple_versions(e: env, diag: span_handler,
 }
 
 type env = @{diag: span_handler,
-             filesearch: filesearch,
-             cstore: cstore::cstore,
+             filesearch: FileSearch,
+             cstore: cstore::CStore,
              os: loader::os,
              static: bool,
              crate_cache: DVec<cache_entry>,
diff --git a/src/rustc/metadata/csearch.rs b/src/rustc/metadata/csearch.rs
index ea6bd499a3b..d9fccb16de0 100644
--- a/src/rustc/metadata/csearch.rs
+++ b/src/rustc/metadata/csearch.rs
@@ -39,18 +39,18 @@ struct ProvidedTraitMethodInfo {
     def_id: ast::def_id
 }
 
-fn get_symbol(cstore: cstore::cstore, def: ast::def_id) -> ~str {
+fn get_symbol(cstore: cstore::CStore, def: ast::def_id) -> ~str {
     let cdata = cstore::get_crate_data(cstore, def.crate).data;
     return decoder::get_symbol(cdata, def.node);
 }
 
-fn get_type_param_count(cstore: cstore::cstore, def: ast::def_id) -> uint {
+fn get_type_param_count(cstore: cstore::CStore, def: ast::def_id) -> uint {
     let cdata = cstore::get_crate_data(cstore, def.crate).data;
     return decoder::get_type_param_count(cdata, def.node);
 }
 
 /// Iterates over all the paths in the given crate.
-fn each_path(cstore: cstore::cstore, cnum: ast::crate_num,
+fn each_path(cstore: cstore::CStore, cnum: ast::crate_num,
              f: fn(decoder::path_entry) -> bool) {
     let crate_data = cstore::get_crate_data(cstore, cnum);
     decoder::each_path(cstore.intr, crate_data, f);
@@ -91,7 +91,7 @@ fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id)
     return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx)
 }
 
-fn get_impls_for_mod(cstore: cstore::cstore, def: ast::def_id,
+fn get_impls_for_mod(cstore: cstore::CStore, def: ast::def_id,
                      name: Option<ast::ident>)
     -> @~[@decoder::_impl] {
     let cdata = cstore::get_crate_data(cstore, def.crate);
@@ -113,14 +113,14 @@ fn get_provided_trait_methods(tcx: ty::ctxt, def: ast::def_id) ->
     decoder::get_provided_trait_methods(cstore.intr, cdata, def.node, tcx)
 }
 
-fn get_method_names_if_trait(cstore: cstore::cstore, def: ast::def_id)
+fn get_method_names_if_trait(cstore: cstore::CStore, def: ast::def_id)
     -> Option<@DVec<(ast::ident, ast::self_ty_)>> {
 
     let cdata = cstore::get_crate_data(cstore, def.crate);
     return decoder::get_method_names_if_trait(cstore.intr, cdata, def.node);
 }
 
-fn get_item_attrs(cstore: cstore::cstore,
+fn get_item_attrs(cstore: cstore::CStore,
                   def_id: ast::def_id,
                   f: fn(~[@ast::meta_item])) {
 
@@ -140,7 +140,7 @@ fn get_type(tcx: ty::ctxt, def: ast::def_id) -> ty::ty_param_bounds_and_ty {
     decoder::get_type(cdata, def.node, tcx)
 }
 
-fn get_region_param(cstore: metadata::cstore::cstore,
+fn get_region_param(cstore: metadata::cstore::CStore,
                     def: ast::def_id) -> Option<ty::region_variance> {
     let cdata = cstore::get_crate_data(cstore, def.crate);
     return decoder::get_region_param(cdata, def.node);
@@ -177,7 +177,7 @@ fn get_impl_traits(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::t] {
     decoder::get_impl_traits(cdata, def.node, tcx)
 }
 
-fn get_impl_method(cstore: cstore::cstore,
+fn get_impl_method(cstore: cstore::CStore,
                    def: ast::def_id, mname: ast::ident)
     -> ast::def_id {
     let cdata = cstore::get_crate_data(cstore, def.crate);
@@ -188,7 +188,7 @@ fn get_impl_method(cstore: cstore::cstore,
    for their methods (so that get_trait_methods can be reused to get
    class methods), classes require a slightly different version of
    get_impl_method. Sigh. */
-fn get_class_method(cstore: cstore::cstore,
+fn get_class_method(cstore: cstore::CStore,
                     def: ast::def_id, mname: ast::ident)
     -> ast::def_id {
     let cdata = cstore::get_crate_data(cstore, def.crate);
@@ -196,7 +196,7 @@ fn get_class_method(cstore: cstore::cstore,
 }
 
 /* If def names a class with a dtor, return it. Otherwise, return none. */
-fn class_dtor(cstore: cstore::cstore, def: ast::def_id)
+fn class_dtor(cstore: cstore::CStore, def: ast::def_id)
     -> Option<ast::def_id> {
     let cdata = cstore::get_crate_data(cstore, def.crate);
     decoder::class_dtor(cdata, def.node)
diff --git a/src/rustc/metadata/cstore.rs b/src/rustc/metadata/cstore.rs
index 483f7ea06a9..4bbca3a0605 100644
--- a/src/rustc/metadata/cstore.rs
+++ b/src/rustc/metadata/cstore.rs
@@ -6,7 +6,7 @@ use std::map::HashMap;
 use syntax::{ast, attr};
 use syntax::parse::token::ident_interner;
 
-export cstore;
+export CStore;
 export cnum_map;
 export crate_metadata;
 export mk_cstore;
@@ -49,7 +49,7 @@ type crate_metadata = @{name: ~str,
 // other modules to access the cstore's private data. This could also be
 // achieved with an obj, but at the expense of a vtable. Not sure if this is a
 // good pattern or not.
-enum cstore { private(cstore_private), }
+enum CStore { private(cstore_private), }
 
 type cstore_private =
     @{metas: map::HashMap<ast::crate_num, crate_metadata>,
@@ -64,11 +64,11 @@ type cstore_private =
 type use_crate_map = map::HashMap<ast::node_id, ast::crate_num>;
 
 // Internal method to retrieve the data from the cstore
-pure fn p(cstore: cstore) -> cstore_private {
+pure fn p(cstore: CStore) -> cstore_private {
     match cstore { private(p) => p }
 }
 
-fn mk_cstore(intr: @ident_interner) -> cstore {
+fn mk_cstore(intr: @ident_interner) -> CStore {
     let meta_cache = map::HashMap();
     let crate_map = map::HashMap();
     let mod_path_map = HashMap();
@@ -81,21 +81,21 @@ fn mk_cstore(intr: @ident_interner) -> cstore {
                      intr: intr});
 }
 
-fn get_crate_data(cstore: cstore, cnum: ast::crate_num) -> crate_metadata {
+fn get_crate_data(cstore: CStore, cnum: ast::crate_num) -> crate_metadata {
     return p(cstore).metas.get(cnum);
 }
 
-fn get_crate_hash(cstore: cstore, cnum: ast::crate_num) -> ~str {
+fn get_crate_hash(cstore: CStore, cnum: ast::crate_num) -> ~str {
     let cdata = get_crate_data(cstore, cnum);
     return decoder::get_crate_hash(cdata.data);
 }
 
-fn get_crate_vers(cstore: cstore, cnum: ast::crate_num) -> ~str {
+fn get_crate_vers(cstore: CStore, cnum: ast::crate_num) -> ~str {
     let cdata = get_crate_data(cstore, cnum);
     return decoder::get_crate_vers(cdata.data);
 }
 
-fn set_crate_data(cstore: cstore, cnum: ast::crate_num,
+fn set_crate_data(cstore: CStore, cnum: ast::crate_num,
                   data: crate_metadata) {
     p(cstore).metas.insert(cnum, data);
     for vec::each(decoder::get_crate_module_paths(cstore.intr, data)) |dp| {
@@ -105,25 +105,25 @@ fn set_crate_data(cstore: cstore, cnum: ast::crate_num,
     }
 }
 
-fn have_crate_data(cstore: cstore, cnum: ast::crate_num) -> bool {
+fn have_crate_data(cstore: CStore, cnum: ast::crate_num) -> bool {
     return p(cstore).metas.contains_key(cnum);
 }
 
-fn iter_crate_data(cstore: cstore, i: fn(ast::crate_num, crate_metadata)) {
+fn iter_crate_data(cstore: CStore, i: fn(ast::crate_num, crate_metadata)) {
     for p(cstore).metas.each |k,v| { i(k, v);};
 }
 
-fn add_used_crate_file(cstore: cstore, lib: &Path) {
+fn add_used_crate_file(cstore: CStore, lib: &Path) {
     if !vec::contains(p(cstore).used_crate_files, lib) {
         p(cstore).used_crate_files.push(copy *lib);
     }
 }
 
-fn get_used_crate_files(cstore: cstore) -> ~[Path] {
+fn get_used_crate_files(cstore: CStore) -> ~[Path] {
     return p(cstore).used_crate_files;
 }
 
-fn add_used_library(cstore: cstore, lib: ~str) -> bool {
+fn add_used_library(cstore: CStore, lib: ~str) -> bool {
     assert lib != ~"";
 
     if vec::contains(p(cstore).used_libraries, &lib) { return false; }
@@ -131,31 +131,31 @@ fn add_used_library(cstore: cstore, lib: ~str) -> bool {
     return true;
 }
 
-fn get_used_libraries(cstore: cstore) -> ~[~str] {
+fn get_used_libraries(cstore: CStore) -> ~[~str] {
     return p(cstore).used_libraries;
 }
 
-fn add_used_link_args(cstore: cstore, args: ~str) {
+fn add_used_link_args(cstore: CStore, args: ~str) {
     p(cstore).used_link_args.push_all(str::split_char(args, ' '));
 }
 
-fn get_used_link_args(cstore: cstore) -> ~[~str] {
+fn get_used_link_args(cstore: CStore) -> ~[~str] {
     return p(cstore).used_link_args;
 }
 
-fn add_use_stmt_cnum(cstore: cstore, use_id: ast::node_id,
+fn add_use_stmt_cnum(cstore: CStore, use_id: ast::node_id,
                      cnum: ast::crate_num) {
     p(cstore).use_crate_map.insert(use_id, cnum);
 }
 
-fn find_use_stmt_cnum(cstore: cstore,
+fn find_use_stmt_cnum(cstore: CStore,
                       use_id: ast::node_id) -> Option<ast::crate_num> {
     p(cstore).use_crate_map.find(use_id)
 }
 
 // returns hashes of crates directly used by this crate. Hashes are
 // sorted by crate name.
-fn get_dep_hashes(cstore: cstore) -> ~[~str] {
+fn get_dep_hashes(cstore: CStore) -> ~[~str] {
     type crate_hash = {name: ~str, hash: ~str};
     let mut result = ~[];
 
@@ -175,7 +175,7 @@ fn get_dep_hashes(cstore: cstore) -> ~[~str] {
     return vec::map(sorted, mapper);
 }
 
-fn get_path(cstore: cstore, d: ast::def_id) -> ~[~str] {
+fn get_path(cstore: CStore, d: ast::def_id) -> ~[~str] {
     option::map_default(&p(cstore).mod_path_map.find(d), ~[],
                         |ds| str::split_str(**ds, ~"::"))
 }
diff --git a/src/rustc/metadata/encoder.rs b/src/rustc/metadata/encoder.rs
index 652af81659a..0b25f0670b4 100644
--- a/src/rustc/metadata/encoder.rs
+++ b/src/rustc/metadata/encoder.rs
@@ -52,7 +52,7 @@ type encode_parms = {
     item_symbols: HashMap<ast::node_id, ~str>,
     discrim_symbols: HashMap<ast::node_id, ~str>,
     link_meta: link_meta,
-    cstore: cstore::cstore,
+    cstore: cstore::CStore,
     encode_inlined_item: encode_inlined_item
 };
 
@@ -77,7 +77,7 @@ enum encode_ctxt = {
     item_symbols: HashMap<ast::node_id, ~str>,
     discrim_symbols: HashMap<ast::node_id, ~str>,
     link_meta: link_meta,
-    cstore: cstore::cstore,
+    cstore: cstore::CStore,
     encode_inlined_item: encode_inlined_item,
     type_abbrevs: abbrev_map
 };
@@ -1035,9 +1035,9 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] {
 }
 
 fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: ebml::Serializer,
-                     cstore: cstore::cstore) {
+                     cstore: cstore::CStore) {
 
-    fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::cstore)
+    fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::CStore)
         -> ~[decoder::crate_dep] {
 
         type hashkv = @{key: crate_num, val: cstore::crate_metadata};
diff --git a/src/rustc/metadata/filesearch.rs b/src/rustc/metadata/filesearch.rs
index 63370b09321..b2d20ce56e8 100644
--- a/src/rustc/metadata/filesearch.rs
+++ b/src/rustc/metadata/filesearch.rs
@@ -3,7 +3,7 @@
 // probably just be folded into cstore.
 
 use result::Result;
-export filesearch;
+export FileSearch;
 export mk_filesearch;
 export pick;
 export pick_file;
@@ -21,7 +21,7 @@ fn pick_file(file: Path, path: &Path) -> Option<Path> {
     else { option::None }
 }
 
-trait filesearch {
+trait FileSearch {
     fn sysroot() -> Path;
     fn lib_search_paths() -> ~[Path];
     fn get_target_lib_path() -> Path;
@@ -30,11 +30,11 @@ trait filesearch {
 
 fn mk_filesearch(maybe_sysroot: Option<Path>,
                  target_triple: &str,
-                 addl_lib_search_paths: ~[Path]) -> filesearch {
+                 addl_lib_search_paths: ~[Path]) -> FileSearch {
     type filesearch_impl = {sysroot: Path,
                             addl_lib_search_paths: ~[Path],
                             target_triple: ~str};
-    impl filesearch_impl: filesearch {
+    impl filesearch_impl: FileSearch {
         fn sysroot() -> Path { self.sysroot }
         fn lib_search_paths() -> ~[Path] {
             let mut paths = self.addl_lib_search_paths;
@@ -64,10 +64,10 @@ fn mk_filesearch(maybe_sysroot: Option<Path>,
     debug!("using sysroot = %s", sysroot.to_str());
     {sysroot: sysroot,
      addl_lib_search_paths: addl_lib_search_paths,
-     target_triple: str::from_slice(target_triple)} as filesearch
+     target_triple: str::from_slice(target_triple)} as FileSearch
 }
 
-fn search<T: Copy>(filesearch: filesearch, pick: pick<T>) -> Option<T> {
+fn search<T: Copy>(filesearch: FileSearch, pick: pick<T>) -> Option<T> {
     let mut rslt = None;
     for filesearch.lib_search_paths().each |lib_search_path| {
         debug!("searching %s", lib_search_path.to_str());
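
The filesearch.rs hunks rename the trait to FileSearch while keeping the shape
of the code: a small record implements the trait and is handed back to callers
as a trait object via the `as FileSearch` cast. As a hedged illustration only
(not the patch's code), a present-day equivalent would return a
Box<dyn FileSearch>; FileSearchImpl and the library-path layout below are
assumptions made for the sketch.

use std::path::PathBuf;

pub trait FileSearch {
    fn sysroot(&self) -> PathBuf;
    fn lib_search_paths(&self) -> Vec<PathBuf>;
}

struct FileSearchImpl {
    sysroot: PathBuf,
    addl_lib_search_paths: Vec<PathBuf>,
    target_triple: String,
}

impl FileSearch for FileSearchImpl {
    fn sysroot(&self) -> PathBuf {
        self.sysroot.clone()
    }

    fn lib_search_paths(&self) -> Vec<PathBuf> {
        // Additional search paths first, then the target library directory
        // under the sysroot, mirroring mk_filesearch above.
        let mut paths = self.addl_lib_search_paths.clone();
        paths.push(self.sysroot.join("lib").join(&self.target_triple));
        paths
    }
}

pub fn mk_filesearch(sysroot: PathBuf,
                     target_triple: &str,
                     addl_lib_search_paths: Vec<PathBuf>) -> Box<dyn FileSearch> {
    Box::new(FileSearchImpl {
        sysroot,
        addl_lib_search_paths,
        target_triple: target_triple.to_string(),
    })
}
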
diff --git a/src/rustc/metadata/loader.rs b/src/rustc/metadata/loader.rs
index 0a8354be71f..61b8bcf9067 100644
--- a/src/rustc/metadata/loader.rs
+++ b/src/rustc/metadata/loader.rs
@@ -5,7 +5,7 @@ use syntax::{ast, attr};
 use syntax::print::pprust;
 use syntax::codemap::span;
 use lib::llvm::{False, llvm, mk_object_file, mk_section_iter};
-use filesearch::filesearch;
+use filesearch::FileSearch;
 use io::WriterUtil;
 use syntax::parse::token::ident_interner;
 
@@ -28,7 +28,7 @@ enum os {
 
 type ctxt = {
     diag: span_handler,
-    filesearch: filesearch,
+    filesearch: FileSearch,
     span: span,
     ident: ast::ident,
     metas: ~[@ast::meta_item],
@@ -66,7 +66,7 @@ fn libname(cx: ctxt) -> {prefix: ~str, suffix: ~str} {
 
 fn find_library_crate_aux(cx: ctxt,
                           nn: {prefix: ~str, suffix: ~str},
-                          filesearch: filesearch::filesearch) ->
+                          filesearch: filesearch::FileSearch) ->
    Option<{ident: ~str, data: @~[u8]}> {
     let crate_name = crate_name_from_metas(cx.metas);
     let prefix: ~str = nn.prefix + crate_name + ~"-";
diff --git a/src/rustc/metadata/tydecode.rs b/src/rustc/metadata/tydecode.rs
index 1375ff2d0be..14aef6db1ad 100644
--- a/src/rustc/metadata/tydecode.rs
+++ b/src/rustc/metadata/tydecode.rs
@@ -162,7 +162,7 @@ fn parse_bound_region(st: @pstate) -> ty::bound_region {
     }
 }
 
-fn parse_region(st: @pstate) -> ty::region {
+fn parse_region(st: @pstate) -> ty::Region {
     match next(st) {
       'b' => {
         ty::re_bound(parse_bound_region(st))
diff --git a/src/rustc/metadata/tyencode.rs b/src/rustc/metadata/tyencode.rs
index 69689b16e15..941dd35bdf0 100644
--- a/src/rustc/metadata/tyencode.rs
+++ b/src/rustc/metadata/tyencode.rs
@@ -125,7 +125,7 @@ fn enc_substs(w: io::Writer, cx: @ctxt, substs: ty::substs) {
     w.write_char(']');
 }
 
-fn enc_region(w: io::Writer, cx: @ctxt, r: ty::region) {
+fn enc_region(w: io::Writer, cx: @ctxt, r: ty::Region) {
     match r {
       ty::re_bound(br) => {
         w.write_char('b');
diff --git a/src/rustc/middle/astencode.rs b/src/rustc/middle/astencode.rs
index d1f766dd867..b47e6d3b151 100644
--- a/src/rustc/middle/astencode.rs
+++ b/src/rustc/middle/astencode.rs
@@ -19,7 +19,7 @@ use middle::{ty, typeck};
 use middle::typeck::{method_origin, method_map_entry,
                      vtable_res,
                      vtable_origin};
-use driver::session::session;
+use driver::session::Session;
 use middle::freevars::freevar_entry;
 use c = metadata::common;
 use e = metadata::encoder;
@@ -136,7 +136,7 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
 // ______________________________________________________________________
 // Enumerating the IDs which appear in an AST
 
-fn reserve_id_range(sess: session,
+fn reserve_id_range(sess: Session,
                     from_id_range: ast_util::id_range) -> ast_util::id_range {
     // Handle the case of an empty range:
     if ast_util::empty(from_id_range) { return from_id_range; }
@@ -379,8 +379,8 @@ impl ty::AutoRef: tr {
     }
 }
 
-impl ty::region: tr {
-    fn tr(xcx: extended_decode_ctxt) -> ty::region {
+impl ty::Region: tr {
+    fn tr(xcx: extended_decode_ctxt) -> ty::Region {
         match self {
             ty::re_bound(br) => ty::re_bound(br.tr(xcx)),
             ty::re_free(id, br) => ty::re_free(xcx.tr_id(id), br.tr(xcx)),
diff --git a/src/rustc/middle/borrowck.rs b/src/rustc/middle/borrowck.rs
index 02fd2998f4d..db0e092ed83 100644
--- a/src/rustc/middle/borrowck.rs
+++ b/src/rustc/middle/borrowck.rs
@@ -229,7 +229,6 @@ use result::{Result, Ok, Err};
 use syntax::print::pprust;
 use util::common::indenter;
 use ty::to_str;
-use driver::session::session;
 use dvec::DVec;
 use mem_categorization::*;
 
@@ -319,8 +318,8 @@ enum bckerr_code {
     err_mut_variant,
     err_root_not_permitted,
     err_mutbl(ast::mutability),
-    err_out_of_root_scope(ty::region, ty::region), // superscope, subscope
-    err_out_of_scope(ty::region, ty::region) // superscope, subscope
+    err_out_of_root_scope(ty::Region, ty::Region), // superscope, subscope
+    err_out_of_scope(ty::Region, ty::Region) // superscope, subscope
 }
 
 impl bckerr_code : cmp::Eq {
@@ -436,7 +435,7 @@ fn root_map() -> root_map {
 // Misc
 
 impl borrowck_ctxt {
-    fn is_subregion_of(r_sub: ty::region, r_sup: ty::region) -> bool {
+    fn is_subregion_of(r_sub: ty::Region, r_sup: ty::Region) -> bool {
         region::is_subregion_of(self.tcx.region_map, r_sub, r_sup)
     }
 
diff --git a/src/rustc/middle/borrowck/gather_loans.rs b/src/rustc/middle/borrowck/gather_loans.rs
index e8d11fd1708..e0eb5519d4d 100644
--- a/src/rustc/middle/borrowck/gather_loans.rs
+++ b/src/rustc/middle/borrowck/gather_loans.rs
@@ -260,7 +260,7 @@ impl gather_loan_ctxt {
     fn guarantee_valid(&self,
                        cmt: cmt,
                        req_mutbl: ast::mutability,
-                       scope_r: ty::region) {
+                       scope_r: ty::Region) {
 
         self.bccx.guaranteed_paths += 1;
 
@@ -390,7 +390,7 @@ impl gather_loan_ctxt {
     fn add_loans(&self,
                  cmt: cmt,
                  req_mutbl: ast::mutability,
-                 scope_r: ty::region,
+                 scope_r: ty::Region,
                  +loans: ~[Loan]) {
         if loans.len() == 0 {
             return;
diff --git a/src/rustc/middle/borrowck/loan.rs b/src/rustc/middle/borrowck/loan.rs
index 5d3ccc39213..7f4f857dae8 100644
--- a/src/rustc/middle/borrowck/loan.rs
+++ b/src/rustc/middle/borrowck/loan.rs
@@ -7,7 +7,7 @@ use result::{Result, Ok, Err};
 
 impl borrowck_ctxt {
     fn loan(cmt: cmt,
-            scope_region: ty::region,
+            scope_region: ty::Region,
             mutbl: ast::mutability) -> bckres<~[Loan]> {
         let lc = LoanContext {
             bccx: self,
@@ -28,7 +28,7 @@ struct LoanContext {
     bccx: borrowck_ctxt,
 
     // the region scope for which we must preserve the memory
-    scope_region: ty::region,
+    scope_region: ty::Region,
 
     // accumulated list of loans that will be required
     mut loans: ~[Loan]
@@ -39,7 +39,7 @@ impl LoanContext {
 
     fn issue_loan(&self,
                   cmt: cmt,
-                  scope_ub: ty::region,
+                  scope_ub: ty::Region,
                   req_mutbl: ast::mutability) -> bckres<()> {
         if self.bccx.is_subregion_of(self.scope_region, scope_ub) {
             match req_mutbl {
diff --git a/src/rustc/middle/borrowck/preserve.rs b/src/rustc/middle/borrowck/preserve.rs
index 7e1d47eed69..556ea7867cf 100644
--- a/src/rustc/middle/borrowck/preserve.rs
+++ b/src/rustc/middle/borrowck/preserve.rs
@@ -23,7 +23,7 @@ impl preserve_condition {
 
 impl borrowck_ctxt {
     fn preserve(cmt: cmt,
-                scope_region: ty::region,
+                scope_region: ty::Region,
                 item_ub: ast::node_id,
                 root_ub: ast::node_id)
         -> bckres<preserve_condition> {
@@ -41,7 +41,7 @@ enum preserve_ctxt = {
     bccx: borrowck_ctxt,
 
     // the region scope for which we must preserve the memory
-    scope_region: ty::region,
+    scope_region: ty::Region,
 
     // the scope for the body of the enclosing fn/method item
     item_ub: ast::node_id,
@@ -277,7 +277,7 @@ priv impl &preserve_ctxt {
     /// Checks that the scope for which the value must be preserved
     /// is a subscope of `scope_ub`; if so, success.
     fn compare_scope(cmt: cmt,
-                     scope_ub: ty::region) -> bckres<preserve_condition> {
+                     scope_ub: ty::Region) -> bckres<preserve_condition> {
         if self.bccx.is_subregion_of(self.scope_region, scope_ub) {
             Ok(pc_ok)
         } else {
diff --git a/src/rustc/middle/capture.rs b/src/rustc/middle/capture.rs
index 618d43e121a..563ea8f84be 100644
--- a/src/rustc/middle/capture.rs
+++ b/src/rustc/middle/capture.rs
@@ -1,5 +1,4 @@
 use syntax::{ast, ast_util};
-use driver::session::session;
 use syntax::codemap::span;
 use std::map;
 use std::map::HashMap;
diff --git a/src/rustc/middle/check_alt.rs b/src/rustc/middle/check_alt.rs
index aab470f6907..fc040ecc4cd 100644
--- a/src/rustc/middle/check_alt.rs
+++ b/src/rustc/middle/check_alt.rs
@@ -7,7 +7,6 @@ use syntax::print::pprust::pat_to_str;
 use util::ppaux::ty_to_str;
 use pat_util::*;
 use syntax::visit;
-use driver::session::session;
 use middle::ty;
 use middle::ty::*;
 use std::map::HashMap;
diff --git a/src/rustc/middle/check_const.rs b/src/rustc/middle/check_const.rs
index bd3abe20134..bdc042fb764 100644
--- a/src/rustc/middle/check_const.rs
+++ b/src/rustc/middle/check_const.rs
@@ -1,10 +1,10 @@
 use syntax::ast::*;
 use syntax::{visit, ast_util, ast_map};
-use driver::session::session;
+use driver::session::Session;
 use std::map::HashMap;
 use dvec::DVec;
 
-fn check_crate(sess: session, crate: @crate, ast_map: ast_map::map,
+fn check_crate(sess: Session, crate: @crate, ast_map: ast_map::map,
                def_map: resolve::DefMap,
                 method_map: typeck::method_map, tcx: ty::ctxt) {
     visit::visit_crate(*crate, false, visit::mk_vt(@{
@@ -17,7 +17,7 @@ fn check_crate(sess: session, crate: @crate, ast_map: ast_map::map,
     sess.abort_if_errors();
 }
 
-fn check_item(sess: session, ast_map: ast_map::map,
+fn check_item(sess: Session, ast_map: ast_map::map,
               def_map: resolve::DefMap,
               it: @item, &&_is_const: bool, v: visit::vt<bool>) {
     match it.node {
@@ -55,7 +55,7 @@ fn check_pat(p: @pat, &&_is_const: bool, v: visit::vt<bool>) {
     }
 }
 
-fn check_expr(sess: session, def_map: resolve::DefMap,
+fn check_expr(sess: Session, def_map: resolve::DefMap,
               method_map: typeck::method_map, tcx: ty::ctxt,
               e: @expr, &&is_const: bool, v: visit::vt<bool>) {
     if is_const {
@@ -142,12 +142,12 @@ fn check_expr(sess: session, def_map: resolve::DefMap,
 
 // Make sure a const item doesn't recursively refer to itself
 // FIXME: Should use the dependency graph when it's available (#1356)
-fn check_item_recursion(sess: session, ast_map: ast_map::map,
+fn check_item_recursion(sess: Session, ast_map: ast_map::map,
                         def_map: resolve::DefMap, it: @item) {
 
     type env = {
         root_it: @item,
-        sess: session,
+        sess: Session,
         ast_map: ast_map::map,
         def_map: resolve::DefMap,
         idstack: @DVec<node_id>,
diff --git a/src/rustc/middle/check_loop.rs b/src/rustc/middle/check_loop.rs
index 3cd26f3039b..3fa7f34fb33 100644
--- a/src/rustc/middle/check_loop.rs
+++ b/src/rustc/middle/check_loop.rs
@@ -1,6 +1,5 @@
 use syntax::ast::*;
 use syntax::visit;
-use driver::session::session;
 
 type ctx = {in_loop: bool, can_ret: bool};
 
diff --git a/src/rustc/middle/kind.rs b/src/rustc/middle/kind.rs
index 36a05d66506..e4dc9e8330e 100644
--- a/src/rustc/middle/kind.rs
+++ b/src/rustc/middle/kind.rs
@@ -1,8 +1,7 @@
 use syntax::{visit, ast_util};
 use syntax::ast::*;
 use syntax::codemap::span;
-use ty::{kind, kind_copyable, kind_noncopyable, kind_const};
-use driver::session::session;
+use middle::ty::{Kind, kind_copyable, kind_noncopyable, kind_const};
 use std::map::HashMap;
 use util::ppaux::{ty_to_str, tys_to_str};
 use syntax::print::pprust::expr_to_str;
@@ -40,7 +39,7 @@ use lint::{non_implicitly_copyable_typarams,implicit_copies};
 
 const try_adding: &str = "Try adding a move";
 
-fn kind_to_str(k: kind) -> ~str {
+fn kind_to_str(k: Kind) -> ~str {
     let mut kinds = ~[];
 
     if ty::kind_lteq(kind_const(), k) {
@@ -387,7 +386,7 @@ fn check_stmt(stmt: @stmt, cx: ctx, v: visit::vt<ctx>) {
     visit::visit_stmt(stmt, cx, v);
 }
 
-fn check_ty(aty: @ty, cx: ctx, v: visit::vt<ctx>) {
+fn check_ty(aty: @Ty, cx: ctx, v: visit::vt<ctx>) {
     match aty.node {
       ty_path(_, id) => {
         do option::iter(&cx.tcx.node_type_substs.find(id)) |ts| {
diff --git a/src/rustc/middle/lang_items.rs b/src/rustc/middle/lang_items.rs
index 7cb2c9eb9cf..383fe2db323 100644
--- a/src/rustc/middle/lang_items.rs
+++ b/src/rustc/middle/lang_items.rs
@@ -9,7 +9,7 @@
 //
 // * Functions called by the compiler itself.
 
-use driver::session::session;
+use driver::session::Session;
 use metadata::csearch::{each_path, get_item_attrs};
 use metadata::cstore::{iter_crate_data};
 use metadata::decoder::{dl_def, dl_field, dl_impl};
@@ -50,7 +50,7 @@ struct LanguageItems {
     mut log_type_fn: Option<def_id>
 }
 
-mod LanguageItems {
+mod language_items {
     #[legacy_exports];
     fn make() -> LanguageItems {
         LanguageItems {
@@ -83,7 +83,7 @@ mod LanguageItems {
     }
 }
 
-fn LanguageItemCollector(crate: @crate, session: session,
+fn LanguageItemCollector(crate: @crate, session: Session,
                          items: &r/LanguageItems)
     -> LanguageItemCollector/&r {
 
@@ -127,7 +127,7 @@ struct LanguageItemCollector {
     items: &LanguageItems,
 
     crate: @crate,
-    session: session,
+    session: Session,
 
     item_refs: HashMap<~str,&mut Option<def_id>>,
 }
@@ -239,8 +239,8 @@ impl LanguageItemCollector {
     }
 }
 
-fn collect_language_items(crate: @crate, session: session) -> LanguageItems {
-    let items = LanguageItems::make();
+fn collect_language_items(crate: @crate, session: Session) -> LanguageItems {
+    let items = language_items::make();
     let collector = LanguageItemCollector(crate, session, &items);
     collector.collect();
     copy items
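
Renaming `mod LanguageItems` to `mod language_items` here is a direct
consequence of the namespace merge: once modules share the type namespace, a
module and a struct cannot both be called LanguageItems in one scope. A minimal
sketch of the resulting shape (the clash still exists in modern Rust); the
Option<u32> fields are placeholders for the real Option<def_id> slots and are
not taken from the patch.

pub struct LanguageItems {
    pub const_trait: Option<u32>,
    pub copy_trait: Option<u32>,
}

// The constructor helper lives in a snake_case module so its name no longer
// collides with the struct above.
pub mod language_items {
    use super::LanguageItems;

    pub fn make() -> LanguageItems {
        LanguageItems { const_trait: None, copy_trait: None }
    }
}
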
diff --git a/src/rustc/middle/lint.rs b/src/rustc/middle/lint.rs
index 0f31f2056a1..0768a092522 100644
--- a/src/rustc/middle/lint.rs
+++ b/src/rustc/middle/lint.rs
@@ -1,5 +1,5 @@
 use driver::session;
-use driver::session::session;
+use driver::session::Session;
 use middle::ty;
 use syntax::{ast, ast_util, visit};
 use syntax::attr;
@@ -244,7 +244,7 @@ fn clone_lint_modes(modes: lint_modes) -> lint_modes {
 type ctxt_ = {dict: lint_dict,
               curr: lint_modes,
               is_default: bool,
-              sess: session};
+              sess: Session};
 
 enum ctxt {
     ctxt_(ctxt_)
@@ -355,7 +355,7 @@ fn build_settings_item(i: @ast::item, &&cx: ctxt, v: visit::vt<ctxt>) {
     }
 }
 
-fn build_settings_crate(sess: session::session, crate: @ast::crate) {
+fn build_settings_crate(sess: session::Session, crate: @ast::crate) {
 
     let cx = ctxt_({dict: get_lint_dict(),
                     curr: std::smallintmap::mk(),
diff --git a/src/rustc/middle/liveness.rs b/src/rustc/middle/liveness.rs
index a0a422bc027..89d5c842a9f 100644
--- a/src/rustc/middle/liveness.rs
+++ b/src/rustc/middle/liveness.rs
@@ -99,7 +99,6 @@ use syntax::print::pprust::{expr_to_str};
 use visit::vt;
 use syntax::codemap::span;
 use syntax::ast::*;
-use driver::session::session;
 use io::WriterUtil;
 use capture::{cap_move, cap_drop, cap_copy, cap_ref};
 
diff --git a/src/rustc/middle/mem_categorization.rs b/src/rustc/middle/mem_categorization.rs
index dc5874ea2cf..a61cb28c16b 100644
--- a/src/rustc/middle/mem_categorization.rs
+++ b/src/rustc/middle/mem_categorization.rs
@@ -122,7 +122,7 @@ impl categorization : cmp::Eq {
 enum ptr_kind {
     uniq_ptr,
     gc_ptr,
-    region_ptr(ty::region),
+    region_ptr(ty::Region),
     unsafe_ptr
 }
 
@@ -993,7 +993,7 @@ impl &mem_categorization_ctxt {
         }
     }
 
-    fn region_to_str(r: ty::region) -> ~str {
+    fn region_to_str(r: ty::Region) -> ~str {
         region_to_str(self.tcx, r)
     }
 }
diff --git a/src/rustc/middle/region.rs b/src/rustc/middle/region.rs
index eb0bf8796f0..5c70cd3e279 100644
--- a/src/rustc/middle/region.rs
+++ b/src/rustc/middle/region.rs
@@ -7,7 +7,7 @@ region parameterized.
 
 */
 
-use driver::session::session;
+use driver::session::Session;
 use middle::ty;
 use syntax::{ast, visit};
 use syntax::codemap::span;
@@ -41,7 +41,7 @@ Encodes the bounding lifetime for a given AST node:
 type region_map = HashMap<ast::node_id, ast::node_id>;
 
 struct ctxt {
-    sess: session,
+    sess: Session,
     def_map: resolve::DefMap,
 
     // Generated maps:
@@ -108,8 +108,8 @@ fn scope_contains(region_map: region_map, superscope: ast::node_id,
 /// intended to run *after inference* and sadly the logic is somewhat
 /// duplicated with the code in infer.rs.
 fn is_subregion_of(region_map: region_map,
-                   sub_region: ty::region,
-                   super_region: ty::region) -> bool {
+                   sub_region: ty::Region,
+                   super_region: ty::Region) -> bool {
     sub_region == super_region ||
         match (sub_region, super_region) {
           (_, ty::re_static) => {
@@ -328,7 +328,7 @@ fn resolve_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
     visit::visit_fn(fk, decl, body, sp, id, fn_cx, visitor);
 }
 
-fn resolve_crate(sess: session, def_map: resolve::DefMap,
+fn resolve_crate(sess: Session, def_map: resolve::DefMap,
                  crate: @ast::crate) -> region_map {
     let cx: ctxt = ctxt {sess: sess,
                          def_map: def_map,
@@ -382,7 +382,7 @@ impl region_dep : cmp::Eq {
 }
 
 type determine_rp_ctxt_ = {
-    sess: session,
+    sess: Session,
     ast_map: ast_map::map,
     def_map: resolve::DefMap,
     region_paramd_items: region_paramd_items,
@@ -599,7 +599,7 @@ fn determine_rp_in_ty_method(ty_m: ast::ty_method,
     }
 }
 
-fn determine_rp_in_ty(ty: @ast::ty,
+fn determine_rp_in_ty(ty: @ast::Ty,
                       &&cx: determine_rp_ctxt,
                       visitor: visit::vt<determine_rp_ctxt>) {
 
@@ -755,7 +755,7 @@ fn determine_rp_in_struct_field(cm: @ast::struct_field,
     }
 }
 
-fn determine_rp_in_crate(sess: session,
+fn determine_rp_in_crate(sess: Session,
                          ast_map: ast_map::map,
                          def_map: resolve::DefMap,
                          crate: @ast::crate) -> region_paramd_items {
diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs
index 4f170fd050b..81ea6daf195 100644
--- a/src/rustc/middle/resolve.rs
+++ b/src/rustc/middle/resolve.rs
@@ -1,4 +1,4 @@
-use driver::session::session;
+use driver::session::Session;
 use metadata::csearch::{each_path, get_method_names_if_trait};
 use metadata::cstore::find_use_stmt_cnum;
 use metadata::decoder::{def_like, dl_def, dl_field, dl_impl};
@@ -35,7 +35,7 @@ use syntax::ast::{pat_box, pat_lit, pat_range, pat_rec, pat_struct};
 use syntax::ast::{pat_tup, pat_uniq, pat_wild, private, provided, public};
 use syntax::ast::{required, rem, self_ty_, shl, shr, stmt_decl};
 use syntax::ast::{struct_field, struct_variant_kind, sty_static, subtract};
-use syntax::ast::{trait_ref, tuple_variant_kind, ty, ty_bool, ty_char};
+use syntax::ast::{trait_ref, tuple_variant_kind, Ty, ty_bool, ty_char};
 use syntax::ast::{ty_f, ty_f32, ty_f64, ty_float, ty_i, ty_i16, ty_i32};
 use syntax::ast::{ty_i64, ty_i8, ty_int, ty_param, ty_path, ty_str, ty_u};
 use syntax::ast::{ty_u16, ty_u32, ty_u64, ty_u8, ty_uint, type_value_ns};
@@ -115,7 +115,6 @@ impl PatternBindingMode : cmp::Eq {
 
 
 enum Namespace {
-    ModuleNS,
     TypeNS,
     ValueNS
 }
@@ -166,19 +165,8 @@ enum CaptureClause {
 
 type ResolveVisitor = vt<()>;
 
-enum ModuleDef {
-    NoModuleDef,                     // Does not define a module.
-    ModuleDef(Privacy, @Module),     // Defines a module.
-}
-
-impl ModuleDef {
-    pure fn is_none() -> bool {
-        match self { NoModuleDef => true, _ => false }
-    }
-}
-
 enum ImportDirectiveNS {
-    ModuleNSOnly,
+    TypeNSOnly,
     AnyNS
 }
 
@@ -363,7 +351,6 @@ struct ImportResolution {
 
     mut outstanding_references: uint,
 
-    mut module_target: Option<Target>,
     mut value_target: Option<Target>,
     mut type_target: Option<Target>,
 
@@ -375,7 +362,6 @@ fn ImportResolution(privacy: Privacy, span: span) -> ImportResolution {
         privacy: privacy,
         span: span,
         outstanding_references: 0u,
-        module_target: None,
         value_target: None,
         type_target: None,
         used: false
@@ -385,7 +371,6 @@ fn ImportResolution(privacy: Privacy, span: span) -> ImportResolution {
 impl ImportResolution {
     fn target_for_namespace(namespace: Namespace) -> Option<Target> {
         match namespace {
-            ModuleNS    => return copy self.module_target,
             TypeNS      => return copy self.type_target,
             ValueNS     => return copy self.value_target
         }
@@ -482,7 +467,7 @@ pure fn is_none<T>(x: Option<T>) -> bool {
     }
 }
 
-fn unused_import_lint_level(session: session) -> level {
+fn unused_import_lint_level(session: Session) -> level {
     for session.opts.lint_opts.each |lint_option_pair| {
         let (lint_type, lint_level) = *lint_option_pair;
         if lint_type == unused_imports {
@@ -504,8 +489,14 @@ impl Privacy : cmp::Eq {
     pure fn ne(other: &Privacy) -> bool { !self.eq(other) }
 }
 
-// Records a possibly-private definition.
-struct Definition {
+// Records a possibly-private type definition.
+enum TypeNsDef {
+    ModuleDef(Privacy, @Module),
+    TypeDef(Privacy, def)
+}
+
+// Records a possibly-private value definition.
+struct ValueNsDef {
     privacy: Privacy,
     def: def,
 }
@@ -513,13 +504,11 @@ struct Definition {
 // Records the definitions (at most one for each namespace) that a name is
 // bound to.
 struct NameBindings {
-    mut module_def: ModuleDef,         //< Meaning in module namespace.
-    mut type_def: Option<Definition>,  //< Meaning in type namespace.
-    mut value_def: Option<Definition>, //< Meaning in value namespace.
+    mut type_def: Option<TypeNsDef>,    //< Meaning in type namespace.
+    mut value_def: Option<ValueNsDef>,  //< Meaning in value namespace.
 
     // For error reporting
-    // XXX: Merge me into Definition.
-    mut module_span: Option<span>,
+    // XXX: Merge me into TypeDef and ValueDef.
     mut type_span: Option<span>,
     mut value_span: Option<span>,
 }
@@ -532,30 +521,30 @@ impl NameBindings {
                      def_id: Option<def_id>,
                      legacy_exports: bool,
                      sp: span) {
-        if self.module_def.is_none() {
+        if self.type_def.is_none() {
             let module_ = @Module(parent_link, def_id, legacy_exports);
-            self.module_def = ModuleDef(privacy, module_);
-            self.module_span = Some(sp);
+            self.type_def = Some(ModuleDef(privacy, module_));
+            self.type_span = Some(sp);
         }
     }
 
     /// Records a type definition.
     fn define_type(privacy: Privacy, def: def, sp: span) {
-        self.type_def = Some(Definition { privacy: privacy, def: def });
+        self.type_def = Some(TypeDef(privacy, def));
         self.type_span = Some(sp);
     }
 
     /// Records a value definition.
     fn define_value(privacy: Privacy, def: def, sp: span) {
-        self.value_def = Some(Definition { privacy: privacy, def: def });
+        self.value_def = Some(ValueNsDef { privacy: privacy, def: def });
         self.value_span = Some(sp);
     }
 
     /// Returns the module node if applicable.
     fn get_module_if_available() -> Option<@Module> {
-        match self.module_def {
-            NoModuleDef         => return None,
-            ModuleDef(_privacy, module_)  => return Some(module_)
+        match self.type_def {
+            Some(ModuleDef(_, module_)) => return Some(module_),
+            None | Some(TypeDef(_, _))  => return None,
         }
     }
 
@@ -564,70 +553,76 @@ impl NameBindings {
      * definition.
      */
     fn get_module() -> @Module {
-        match self.module_def {
-            NoModuleDef => {
-                fail
-                    ~"get_module called on a node with no module definition!";
-            }
-            ModuleDef(_, module_) => {
-                return module_;
+        match self.type_def {
+            None | Some(TypeDef(*)) => {
+                fail ~"get_module called on a node with no module \
+                       definition!"
             }
+            Some(ModuleDef(_, module_)) => module_
         }
     }
 
     fn defined_in_namespace(namespace: Namespace) -> bool {
         match namespace {
-            ModuleNS => {
-                match self.module_def {
-                    NoModuleDef => false,
-                    _ => true
-                }
-            }
             TypeNS   => return self.type_def.is_some(),
             ValueNS  => return self.value_def.is_some()
         }
     }
 
-    fn def_for_namespace(namespace: Namespace) -> Option<Definition> {
+    fn def_for_namespace(namespace: Namespace) -> Option<def> {
         match namespace {
-          TypeNS => return self.type_def,
-          ValueNS => return self.value_def,
-          ModuleNS => match self.module_def {
-            NoModuleDef => return None,
-            ModuleDef(privacy, module_) =>
-                match module_.def_id {
-                    None => return None,
-                    Some(def_id) => {
-                        return Some(Definition {
-                            privacy: privacy,
-                            def: def_mod(def_id)
-                        });
+            TypeNS => {
+                match self.type_def {
+                    None => None,
+                    Some(ModuleDef(_, module_)) => {
+                        module_.def_id.map(|def_id| def_mod(*def_id))
                     }
+                    Some(TypeDef(_, def)) => Some(def)
                 }
-          }
+            }
+            ValueNS => {
+                match self.value_def {
+                    None => None,
+                    Some(value_def) => Some(value_def.def)
+                }
+            }
+        }
+    }
+
+    fn privacy_for_namespace(namespace: Namespace) -> Option<Privacy> {
+        match namespace {
+            TypeNS => {
+                match self.type_def {
+                    None => None,
+                    Some(ModuleDef(privacy, _)) | Some(TypeDef(privacy, _)) =>
+                        Some(privacy)
+                }
+            }
+            ValueNS => {
+                match self.value_def {
+                    None => None,
+                    Some(value_def) => Some(value_def.privacy)
+                }
+            }
         }
     }
 
     fn span_for_namespace(namespace: Namespace) -> Option<span> {
-        match self.def_for_namespace(namespace) {
-          Some(_) => {
+        if self.defined_in_namespace(namespace) {
             match namespace {
-              TypeNS   => self.type_span,
-              ValueNS  => self.value_span,
-              ModuleNS => self.module_span
+                TypeNS  => self.type_span,
+                ValueNS => self.value_span,
             }
-          }
-          None => None
+        } else {
+            None
         }
     }
 }
 
 fn NameBindings() -> NameBindings {
     NameBindings {
-        module_def: NoModuleDef,
         type_def: None,
         value_def: None,
-        module_span: None,
         type_span: None,
         value_span: None
     }
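
The NameBindings rework above is the core of the patch: the dedicated module
slot disappears, and a module definition becomes one arm of the new TypeNsDef
enum alongside ordinary type definitions, with ValueNsDef keeping the value
side. The sketch below restates that bookkeeping in present-day Rust purely for
orientation; Privacy, Module, and Def are simplified stand-ins for the
resolver's real types, and only get_module_if_available and
privacy_for_namespace are mirrored.

#[derive(Clone, Copy, PartialEq)]
enum Privacy { Public, Private }

struct Module { name: String }

struct Def { id: u32 }

// A module definition now lives in the type namespace next to ordinary type
// definitions, so a single field covers both.
enum TypeNsDef {
    ModuleDef(Privacy, Module),
    TypeDef(Privacy, Def),
}

struct ValueNsDef { privacy: Privacy, def: Def }

struct NameBindings {
    type_def: Option<TypeNsDef>,
    value_def: Option<ValueNsDef>,
}

impl NameBindings {
    // Mirrors get_module_if_available: only the ModuleDef arm yields a module.
    fn get_module_if_available(&self) -> Option<&Module> {
        match &self.type_def {
            Some(TypeNsDef::ModuleDef(_, m)) => Some(m),
            Some(TypeNsDef::TypeDef(..)) | None => None,
        }
    }

    // Mirrors privacy_for_namespace for the type namespace: both arms carry a
    // privacy, so a single or-pattern handles them.
    fn type_privacy(&self) -> Option<Privacy> {
        match &self.type_def {
            Some(TypeNsDef::ModuleDef(p, _)) | Some(TypeNsDef::TypeDef(p, _)) => Some(*p),
            None => None,
        }
    }

    fn defined_in_value_ns(&self) -> bool {
        self.value_def.is_some()
    }
}
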
@@ -675,9 +670,8 @@ fn PrimitiveTypeTable(intr: @ident_interner) -> PrimitiveTypeTable {
 
 fn namespace_to_str(ns: Namespace) -> ~str {
     match ns {
-      TypeNS   => ~"type",
-      ValueNS  => ~"value",
-      ModuleNS => ~"module"
+        TypeNS  => ~"type",
+        ValueNS => ~"value",
     }
 }
 
@@ -693,9 +687,8 @@ fn has_legacy_export_attr(attrs: &[syntax::ast::attribute]) -> bool {
     return false;
 }
 
-fn Resolver(session: session, lang_items: LanguageItems,
+fn Resolver(session: Session, lang_items: LanguageItems,
             crate: @crate) -> Resolver {
-
     let graph_root = @NameBindings();
 
     (*graph_root).define_module(Public,
@@ -735,7 +728,7 @@ fn Resolver(session: session, lang_items: LanguageItems,
         primitive_type_table: @PrimitiveTypeTable(session.
                                                   parse_sess.interner),
 
-        namespaces: ~[ ModuleNS, TypeNS, ValueNS ],
+        namespaces: ~[ TypeNS, ValueNS ],
 
         def_map: HashMap(),
         export_map2: HashMap(),
@@ -749,7 +742,7 @@ fn Resolver(session: session, lang_items: LanguageItems,
 
 /// The main resolver class.
 struct Resolver {
-    session: session,
+    session: Session,
     lang_items: LanguageItems,
     crate: @crate,
 
@@ -992,14 +985,14 @@ impl Resolver {
 
         match item.node {
             item_mod(module_) => {
-              let legacy = has_legacy_export_attr(item.attrs);
-              let (name_bindings, new_parent) = self.add_child(ident, parent,
-                                                       ~[ModuleNS], sp);
+                let legacy = has_legacy_export_attr(item.attrs);
+                let (name_bindings, new_parent) =
+                    self.add_child(ident, parent, ~[TypeNS], sp);
 
                 let parent_link = self.get_parent_link(new_parent, ident);
                 let def_id = { crate: 0, node: item.id };
-              (*name_bindings).define_module(privacy, parent_link,
-                                             Some(def_id), legacy, sp);
+                (*name_bindings).define_module(privacy, parent_link,
+                                               Some(def_id), legacy, sp);
 
                 let new_parent =
                     ModuleReducedGraphParent((*name_bindings).get_module());
@@ -1007,25 +1000,30 @@ impl Resolver {
                 visit_mod(module_, sp, item.id, new_parent, visitor);
             }
             item_foreign_mod(fm) => {
-              let legacy = has_legacy_export_attr(item.attrs);
-              let new_parent = match fm.sort {
-                named => {
-                  let (name_bindings, new_parent) = self.add_child(ident,
-                     parent, ~[ModuleNS], sp);
+                let legacy = has_legacy_export_attr(item.attrs);
+                let new_parent = match fm.sort {
+                    named => {
+                        let (name_bindings, new_parent) =
+                            self.add_child(ident, parent, ~[TypeNS], sp);
 
-                  let parent_link = self.get_parent_link(new_parent, ident);
-                  let def_id = { crate: 0, node: item.id };
-                  (*name_bindings).define_module(privacy, parent_link,
-                                                 Some(def_id), legacy, sp);
+                        let parent_link = self.get_parent_link(new_parent,
+                                                               ident);
+                        let def_id = { crate: 0, node: item.id };
+                        (*name_bindings).define_module(privacy,
+                                                       parent_link,
+                                                       Some(def_id),
+                                                       legacy,
+                                                       sp);
+
+                        ModuleReducedGraphParent(name_bindings.get_module())
+                    }
 
-                  ModuleReducedGraphParent((*name_bindings).get_module())
-                }
-                // For anon foreign mods, the contents just go in the
-                // current scope
-                anonymous => parent
-              };
+                    // For anon foreign mods, the contents just go in the
+                    // current scope
+                    anonymous => parent
+                };
 
-              visit_item(item, new_parent, visitor);
+                visit_item(item, new_parent, visitor);
             }
 
             // These items live in the value namespace.
@@ -1226,7 +1224,7 @@ impl Resolver {
                     match view_path.node {
                         view_path_simple(binding, full_path, ns, _) => {
                             let ns = match ns {
-                                module_ns => ModuleNSOnly,
+                                module_ns => TypeNSOnly,
                                 type_value_ns => AnyNS
                             };
 
@@ -1326,8 +1324,7 @@ impl Resolver {
                 match find_use_stmt_cnum(self.session.cstore, node_id) {
                     Some(crate_id) => {
                         let (child_name_bindings, new_parent) =
-                            // should this be in ModuleNS? --tjc
-                            self.add_child(name, parent, ~[ModuleNS],
+                            self.add_child(name, parent, ~[TypeNS],
                                            view_item.span);
 
                         let def_id = { crate: crate_id, node: 0 };
@@ -1410,8 +1407,8 @@ impl Resolver {
                            ident: ident, new_parent: ReducedGraphParent) {
         match def {
           def_mod(def_id) | def_foreign_mod(def_id) => {
-            match copy child_name_bindings.module_def {
-              NoModuleDef => {
+            match copy child_name_bindings.type_def {
+              None => {
                 debug!("(building reduced graph for \
                         external crate) building module \
                         %s", final_ident);
@@ -1441,10 +1438,8 @@ impl Resolver {
                         fail ~"can't happen";
                       }
                       ModuleParentLink(parent_module, ident) => {
-
                         let name_bindings = parent_module.children.get(ident);
-
-                        resolution.module_target =
+                        resolution.type_target =
                             Some(Target(parent_module, name_bindings));
                       }
                     }
@@ -1456,13 +1451,16 @@ impl Resolver {
                   }
                 }
               }
-              ModuleDef(_priv, module_) => {
+              Some(ModuleDef(_, module_)) => {
                 debug!("(building reduced graph for \
                         external crate) already created \
                         module");
                 module_.def_id = Some(def_id);
                 modules.insert(def_id, module_);
               }
+              Some(TypeDef(*)) => {
+                self.session.bug(~"external module def overwriting type def");
+              }
             }
           }
           def_fn(*) | def_static_method(*) | def_const(*) |
@@ -1553,8 +1551,8 @@ impl Resolver {
                                    ~[], dummy_sp());
 
                 // Define or reuse the module node.
-                match child_name_bindings.module_def {
-                    NoModuleDef => {
+                match child_name_bindings.type_def {
+                    None => {
                         debug!("(building reduced graph for external crate) \
                                 autovivifying %s", *ident_str);
                         let parent_link = self.get_parent_link(new_parent,
@@ -1564,7 +1562,7 @@ impl Resolver {
                                                              None, false,
                                                              dummy_sp());
                     }
-                    ModuleDef(*) => { /* Fall through. */ }
+                    Some(_) => { /* Fall through. */ }
                 }
 
                 current_module = (*child_name_bindings).get_module();
@@ -1807,7 +1805,7 @@ impl Resolver {
                                                            target,
                                                            source);
                         }
-                        SingleImport(target, source, ModuleNSOnly) => {
+                        SingleImport(target, source, TypeNSOnly) => {
                             resolution_result =
                                 self.resolve_single_module_import
                                     (module_, containing_module, target,
@@ -1876,12 +1874,11 @@ impl Resolver {
             return Failed;
         }
 
-        // We need to resolve all four namespaces for this to succeed.
+        // We need to resolve both namespaces for this to succeed.
         //
         // XXX: See if there's some way of handling namespaces in a more
-        // generic way. We have four of them; it seems worth doing...
+        // generic way. We have two of them; it seems worth doing...
 
-        let mut module_result = UnknownResult;
         let mut value_result = UnknownResult;
         let mut type_result = UnknownResult;
 
@@ -1891,10 +1888,6 @@ impl Resolver {
                 // Continue.
             }
             Some(child_name_bindings) => {
-                if (*child_name_bindings).defined_in_namespace(ModuleNS) {
-                    module_result = BoundResult(containing_module,
-                                                child_name_bindings);
-                }
                 if (*child_name_bindings).defined_in_namespace(ValueNS) {
                     value_result = BoundResult(containing_module,
                                                child_name_bindings);
@@ -1906,11 +1899,10 @@ impl Resolver {
             }
         }
 
-        // Unless we managed to find a result in all four namespaces
-        // (exceedingly unlikely), search imports as well.
-
-        match (module_result, value_result, type_result) {
-            (BoundResult(*), BoundResult(*), BoundResult(*)) => {
+        // Unless we managed to find a result in both namespaces (unlikely),
+        // search imports as well.
+        match (value_result, type_result) {
+            (BoundResult(*), BoundResult(*)) => {
                 // Continue.
             }
             _ => {
@@ -1934,9 +1926,6 @@ impl Resolver {
                         // therefore accurately report that the names are
                         // unbound.
 
-                        if module_result.is_unknown() {
-                            module_result = UnboundResult;
-                        }
                         if value_result.is_unknown() {
                             value_result = UnboundResult;
                         }
@@ -1973,11 +1962,6 @@ impl Resolver {
 
                         // The name is an import which has been fully
                         // resolved. We can, therefore, just follow it.
-
-                        if module_result.is_unknown() {
-                            module_result = get_binding(import_resolution,
-                                                        ModuleNS);
-                        }
                         if value_result.is_unknown() {
                             value_result = get_binding(import_resolution,
                                                        ValueNS);
@@ -2001,20 +1985,6 @@ impl Resolver {
         assert module_.import_resolutions.contains_key(target);
         let import_resolution = module_.import_resolutions.get(target);
 
-        match module_result {
-            BoundResult(target_module, name_bindings) => {
-                debug!("(resolving single import) found module binding");
-                import_resolution.module_target =
-                    Some(Target(target_module, name_bindings));
-            }
-            UnboundResult => {
-                debug!("(resolving single import) didn't find module \
-                        binding");
-            }
-            UnknownResult => {
-                fail ~"module result should be known at this point";
-            }
-        }
         match value_result {
             BoundResult(target_module, name_bindings) => {
                 import_resolution.value_target =
@@ -2037,12 +2007,10 @@ impl Resolver {
         }
 
         let i = import_resolution;
-        match (i.module_target, i.value_target, i.type_target) {
-          /*
-            If this name wasn't found in any of the four namespaces, it's
-            definitely unresolved
-           */
-          (None, None, None) => { return Failed; }
+        match (i.value_target, i.type_target) {
+          // If this name wasn't found in either namespace, it's definitely
+          // unresolved.
+          (None, None) => { return Failed; }
           _ => {}
         }
 
@@ -2081,7 +2049,7 @@ impl Resolver {
                 // Continue.
             }
             Some(child_name_bindings) => {
-                if (*child_name_bindings).defined_in_namespace(ModuleNS) {
+                if (*child_name_bindings).defined_in_namespace(TypeNS) {
                     module_result = BoundResult(containing_module,
                                                 child_name_bindings);
                 }
@@ -2125,8 +2093,8 @@ impl Resolver {
                         // resolved. We can, therefore, just follow it.
 
                         if module_result.is_unknown() {
-                            match (*import_resolution).
-                                    target_for_namespace(ModuleNS) {
+                            match (*import_resolution).target_for_namespace(
+                                    TypeNS) {
                                 None => {
                                     module_result = UnboundResult;
                                 }
@@ -2156,7 +2124,7 @@ impl Resolver {
         match module_result {
             BoundResult(target_module, name_bindings) => {
                 debug!("(resolving single import) found module binding");
-                import_resolution.module_target =
+                import_resolution.type_target =
                     Some(Target(target_module, name_bindings));
             }
             UnboundResult => {
@@ -2169,8 +2137,8 @@ impl Resolver {
         }
 
         let i = import_resolution;
-        if i.module_target.is_none() {
-          // If this name wasn't found in the module namespace, it's
+        if i.type_target.is_none() {
+          // If this name wasn't found in the type namespace, it's
           // definitely unresolved.
           return Failed;
         }
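
After the hunks above, a single import tracks only a value target and a type
target, and module imports now resolve through the type target. A hedged sketch
of the final "was anything found?" check follows; it is not the resolver's
actual code, and Target, ImportResolution, and ResolveResult are simplified
placeholders.

struct Target { name: String }

#[derive(Default)]
struct ImportResolution {
    value_target: Option<Target>,
    type_target: Option<Target>,
}

enum ResolveResult { Failed, Resolved }

fn finish_single_import(res: &ImportResolution) -> ResolveResult {
    match (&res.value_target, &res.type_target) {
        // If the name wasn't found in either namespace, it's definitely
        // unresolved.
        (None, None) => ResolveResult::Failed,
        _ => ResolveResult::Resolved,
    }
}
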
@@ -2222,7 +2190,7 @@ impl Resolver {
 
             debug!("(resolving glob import) writing module resolution \
                     %? into `%s`",
-                   is_none(target_import_resolution.module_target),
+                   is_none(target_import_resolution.type_target),
                    self.module_to_str(module_));
 
             // Here we merge two import resolutions.
@@ -2232,8 +2200,6 @@ impl Resolver {
                     let new_import_resolution =
                         @ImportResolution(privacy,
                                           target_import_resolution.span);
-                    new_import_resolution.module_target =
-                        copy target_import_resolution.module_target;
                     new_import_resolution.value_target =
                         copy target_import_resolution.value_target;
                     new_import_resolution.type_target =
@@ -2246,15 +2212,6 @@ impl Resolver {
                     // Merge the two import resolutions at a finer-grained
                     // level.
 
-                    match copy target_import_resolution.module_target {
-                        None => {
-                            // Continue.
-                        }
-                        Some(module_target) => {
-                            dest_import_resolution.module_target =
-                                Some(copy module_target);
-                        }
-                    }
                     match copy target_import_resolution.value_target {
                         None => {
                             // Continue.
@@ -2307,11 +2264,6 @@ impl Resolver {
                    self.module_to_str(module_));
 
             // Merge the child item into the import resolution.
-            if (*name_bindings).defined_in_namespace(ModuleNS) {
-                debug!("(resolving glob import) ... for module target");
-                dest_import_resolution.module_target =
-                    Some(Target(containing_module, name_bindings));
-            }
             if (*name_bindings).defined_in_namespace(ValueNS) {
                 debug!("(resolving glob import) ... for value target");
                 dest_import_resolution.value_target =
@@ -2345,9 +2297,8 @@ impl Resolver {
 
         while index < module_path_len {
             let name = (*module_path).get_elt(index);
-            match self.resolve_name_in_module(search_module, name, ModuleNS,
-                                            xray) {
-
+            match self.resolve_name_in_module(search_module, name, TypeNS,
+                                              xray) {
                 Failed => {
                     self.session.span_err(span, ~"unresolved name");
                     return Failed;
@@ -2359,8 +2310,8 @@ impl Resolver {
                     return Indeterminate;
                 }
                 Success(target) => {
-                    match target.bindings.module_def {
-                        NoModuleDef => {
+                    match target.bindings.type_def {
+                        None | Some(TypeDef(*)) => {
                             // Not a module.
                             self.session.span_err(span,
                                                   fmt!("not a module: %s",
@@ -2368,7 +2319,7 @@ impl Resolver {
                                                            str_of(name)));
                             return Failed;
                         }
-                        ModuleDef(_, copy module_) => {
+                        Some(ModuleDef(_, copy module_)) => {
                             search_module = module_;
                         }
                     }
@@ -2443,7 +2394,6 @@ impl Resolver {
         match module_.children.find(name) {
             Some(name_bindings)
                     if (*name_bindings).defined_in_namespace(namespace) => {
-
                 return Success(Target(module_, name_bindings));
             }
             Some(_) | None => { /* Not found; continue. */ }
@@ -2516,15 +2466,15 @@ impl Resolver {
     fn resolve_module_in_lexical_scope(module_: @Module, name: ident)
                                     -> ResolveResult<@Module> {
 
-        match self.resolve_item_in_lexical_scope(module_, name, ModuleNS) {
+        match self.resolve_item_in_lexical_scope(module_, name, TypeNS) {
             Success(target) => {
-                match target.bindings.module_def {
-                    NoModuleDef => {
+                match target.bindings.type_def {
+                    None | Some(TypeDef(*)) => {
                         error!("!!! (resolving module in lexical scope) module
                                 wasn't actually a module!");
                         return Failed;
                     }
-                    ModuleDef(_, module_) => {
+                    Some(ModuleDef(_, module_)) => {
                         return Success(module_);
                     }
                 }
@@ -2661,8 +2611,7 @@ impl Resolver {
         debug!("(resolving one-level naming result) searching for module");
         match self.resolve_item_in_lexical_scope(module_,
                                                  source_name,
-                                                 ModuleNS) {
-
+                                                 TypeNS) {
             Failed => {
                 debug!("(resolving one-level renaming import) didn't find \
                         module result");
@@ -2682,7 +2631,7 @@ impl Resolver {
 
         let mut value_result;
         let mut type_result;
-        if allowable_namespaces == ModuleNSOnly {
+        if allowable_namespaces == TypeNSOnly {
             value_result = None;
             type_result = None;
         } else {
@@ -2772,7 +2721,6 @@ impl Resolver {
                        self.session.str_of(target_name),
                        self.module_to_str(module_));
 
-                import_resolution.module_target = module_result;
                 import_resolution.value_target = value_result;
                 import_resolution.type_target = type_result;
 
@@ -2885,18 +2833,19 @@ impl Resolver {
                                    ident: ident,
                                    namebindings: @NameBindings,
                                    reexport: bool) {
-        for [ModuleNS, TypeNS, ValueNS].each |ns| {
-            match namebindings.def_for_namespace(*ns) {
-                Some(d) if d.privacy == Public => {
+        for [ TypeNS, ValueNS ].each |ns| {
+            match (namebindings.def_for_namespace(*ns),
+                   namebindings.privacy_for_namespace(*ns)) {
+                (Some(d), Some(Public)) => {
                     debug!("(computing exports) YES: %s '%s' \
                             => %?",
                            if reexport { ~"reexport" } else { ~"export"},
                            self.session.str_of(ident),
-                           def_id_of_def(d.def));
+                           def_id_of_def(d));
                     exports2.push(Export2 {
                         reexport: reexport,
                         name: self.session.str_of(ident),
-                        def_id: def_id_of_def(d.def)
+                        def_id: def_id_of_def(d)
                     });
                 }
                 _ => ()
@@ -2914,12 +2863,13 @@ impl Resolver {
         }
 
         for module_.import_resolutions.each_ref |ident, importresolution| {
-            for [ModuleNS, TypeNS, ValueNS].each |ns| {
+            for [ TypeNS, ValueNS ].each |ns| {
                 match importresolution.target_for_namespace(*ns) {
                     Some(target) => {
                         debug!("(computing exports) maybe reexport '%s'",
                                self.session.str_of(*ident));
-                        self.add_exports_of_namebindings(exports2, *ident,
+                        self.add_exports_of_namebindings(exports2,
+                                                         *ident,
                                                          target.bindings,
                                                          true)
                     }
@@ -3666,7 +3616,7 @@ impl Resolver {
                               span: span,
                               type_parameters: ~[ty_param],
                               opt_trait_reference: Option<@trait_ref>,
-                              self_type: @ty,
+                              self_type: @Ty,
                               methods: ~[@method],
                               visitor: ResolveVisitor) {
 
@@ -3864,7 +3814,7 @@ impl Resolver {
         debug!("(resolving block) leaving block");
     }
 
-    fn resolve_type(ty: @ty, visitor: ResolveVisitor) {
+    fn resolve_type(ty: @Ty, visitor: ResolveVisitor) {
         match ty.node {
             // Like path expressions, the interpretation of path types depends
             // on whether the path has multiple elements in it or not.
@@ -3872,42 +3822,44 @@ impl Resolver {
             ty_path(path, path_id) => {
                 // This is a path in the type namespace. Walk through scopes
                 // scopes looking for it.
+                let mut result_def = None;
 
-                let mut result_def;
-                match self.resolve_path(path, TypeNS, true, visitor) {
-                    Some(def) => {
-                        debug!("(resolving type) resolved `%s` to type",
-                               self.session.str_of(path.idents.last()));
-                        result_def = Some(def);
-                    }
-                    None => {
-                        result_def = None;
+                // First, check to see whether the name is a primitive type.
+                if path.idents.len() == 1u {
+                    let name = path.idents.last();
+
+                    match self.primitive_type_table
+                            .primitive_types
+                            .find(name) {
+
+                        Some(primitive_type) => {
+                            result_def =
+                                Some(def_prim_ty(primitive_type));
+                        }
+                        None => {
+                            // Continue.
+                        }
                     }
                 }
 
                 match result_def {
-                    Some(_) => {
-                        // Continue.
-                    }
                     None => {
-                        // Check to see whether the name is a primitive type.
-                        if path.idents.len() == 1u {
-                            let name = path.idents.last();
-
-                            match self.primitive_type_table
-                                    .primitive_types
-                                    .find(name) {
-
-                                Some(primitive_type) => {
-                                    result_def =
-                                        Some(def_prim_ty(primitive_type));
-                                }
-                                None => {
-                                    // Continue.
-                                }
+                        match self.resolve_path(path, TypeNS, true, visitor) {
+                            Some(def) => {
+                                debug!("(resolving type) resolved `%s` to \
+                                        type",
+                                       self.session.str_of(
+                                            path.idents.last()));
+                                result_def = Some(def);
+                            }
+                            None => {
+                                result_def = None;
                             }
                         }
                     }
+                    Some(_) => {
+                        // Continue.
+                    }
                 }
 
                 match copy result_def {
@@ -4223,12 +4175,17 @@ impl Resolver {
         // First, search children.
         match containing_module.children.find(name) {
             Some(child_name_bindings) => {
-                match (*child_name_bindings).def_for_namespace(namespace) {
-                    Some(def) if def.privacy == Public || xray == Xray => {
+                match (child_name_bindings.def_for_namespace(namespace),
+                       child_name_bindings.privacy_for_namespace(namespace)) {
+                    (Some(def), Some(Public)) => {
                         // Found it. Stop the search here.
-                        return ChildNameDefinition(def.def);
+                        return ChildNameDefinition(def);
                     }
-                    Some(_) | None => {
+                    (Some(def), _) if xray == Xray => {
+                        // Found it. Stop the search here.
+                        return ChildNameDefinition(def);
+                    }
+                    (Some(_), _) | (None, _) => {
                         // Continue.
                     }
                 }
@@ -4244,14 +4201,15 @@ impl Resolver {
                                        xray == Xray => {
                 match (*import_resolution).target_for_namespace(namespace) {
                     Some(target) => {
-                        match (*target.bindings)
-                            .def_for_namespace(namespace) {
-                            Some(def) if def.privacy == Public => {
+                        match (target.bindings.def_for_namespace(namespace),
+                               target.bindings.privacy_for_namespace(
+                                    namespace)) {
+                            (Some(def), Some(Public)) => {
                                 // Found it.
                                 import_resolution.used = true;
-                                return ImportNameDefinition(def.def);
+                                return ImportNameDefinition(def);
                             }
-                            Some(_) | None => {
+                            (Some(_), _) | (None, _) => {
                                 // This can happen with external impls, due to
                                 // the imperfect way we read the metadata.
 
@@ -4391,9 +4349,6 @@ impl Resolver {
                 search_result = self.search_ribs(self.type_ribs, ident, span,
                                                  AllowCapturingSelf);
             }
-            ModuleNS => {
-                fail ~"module namespaces do not have local ribs";
-            }
         }
 
         match copy search_result {
@@ -4413,23 +4368,22 @@ impl Resolver {
     fn resolve_item_by_identifier_in_lexical_scope(ident: ident,
                                                    namespace: Namespace)
                                                 -> Option<def> {
-
         // Check the items.
         match self.resolve_item_in_lexical_scope(self.current_module,
                                                ident,
                                                namespace) {
-
             Success(target) => {
                 match (*target.bindings).def_for_namespace(namespace) {
                     None => {
-                        fail ~"resolved name in a namespace to a set of name \
-                              bindings with no def for that namespace?!";
+                        // This can happen if we were looking for a type and
+                        // found a module instead. Modules don't have defs.
+                        return None;
                     }
                     Some(def) => {
                         debug!("(resolving item path in lexical scope) \
                                 resolved `%s` to item",
                                self.session.str_of(ident));
-                        return Some(def.def);
+                        return Some(def);
                     }
                 }
             }
@@ -4703,7 +4657,7 @@ impl Resolver {
             for search_module.children.each |_name, child_name_bindings| {
                 match child_name_bindings.def_for_namespace(TypeNS) {
                     Some(def) => {
-                        match def.def {
+                        match def {
                             def_ty(trait_def_id) => {
                                 self.add_trait_info_if_containing_method(
                                     found_traits, trait_def_id, name);
@@ -4730,7 +4684,7 @@ impl Resolver {
                     Some(target) => {
                         match target.bindings.def_for_namespace(TypeNS) {
                             Some(def) => {
-                                match def.def {
+                                match def {
                                     def_ty(trait_def_id) => {
                                         self.
                                         add_trait_info_if_containing_method(
@@ -4937,15 +4891,6 @@ impl Resolver {
 
         debug!("Import resolutions:");
         for module_.import_resolutions.each |name, import_resolution| {
-            let mut module_repr;
-            match (*import_resolution).target_for_namespace(ModuleNS) {
-                None => { module_repr = ~""; }
-                Some(_) => {
-                    module_repr = ~" module:?";
-                    // XXX
-                }
-            }
-
             let mut value_repr;
             match (*import_resolution).target_for_namespace(ValueNS) {
                 None => { value_repr = ~""; }
@@ -4964,15 +4909,14 @@ impl Resolver {
                 }
             }
 
-            debug!("* %s:%s%s%s",
-                   self.session.str_of(name),
-                   module_repr, value_repr, type_repr);
+            debug!("* %s:%s%s", self.session.str_of(name),
+                   value_repr, type_repr);
         }
     }
 }
 
 /// Entry point to crate resolution.
-fn resolve_crate(session: session, lang_items: LanguageItems, crate: @crate)
+fn resolve_crate(session: Session, lang_items: LanguageItems, crate: @crate)
               -> { def_map: DefMap,
                    exp_map2: ExportMap2,
                    trait_map: TraitMap } {
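
The resolver changes above drop the separate module namespace, so a name binding now carries at most a type-namespace def and a value-namespace def, each paired with its own privacy. A minimal modern-Rust sketch of the resulting two-namespace export check; Namespace, Privacy, Def and NameBindings below are illustrative stand-ins, not rustc's actual types:

    // Only the type and value namespaces remain after the merge.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Namespace { TypeNS, ValueNS }

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Privacy { Public, Private }

    #[derive(Clone, Copy, Debug)]
    struct Def(u32); // stand-in for a def id

    #[derive(Default)]
    struct NameBindings {
        type_def: Option<(Def, Privacy)>,
        value_def: Option<(Def, Privacy)>,
    }

    impl NameBindings {
        fn def_for_namespace(&self, ns: Namespace) -> Option<Def> {
            match ns {
                Namespace::TypeNS => self.type_def.map(|(d, _)| d),
                Namespace::ValueNS => self.value_def.map(|(d, _)| d),
            }
        }
        fn privacy_for_namespace(&self, ns: Namespace) -> Option<Privacy> {
            match ns {
                Namespace::TypeNS => self.type_def.map(|(_, p)| p),
                Namespace::ValueNS => self.value_def.map(|(_, p)| p),
            }
        }
    }

    // Mirrors the reworked export loop: walk the two namespaces and keep
    // only defs whose privacy is Public.
    fn exported_defs(b: &NameBindings) -> Vec<Def> {
        let mut out = Vec::new();
        for ns in [Namespace::TypeNS, Namespace::ValueNS] {
            if let (Some(d), Some(Privacy::Public)) =
                (b.def_for_namespace(ns), b.privacy_for_namespace(ns))
            {
                out.push(d);
            }
        }
        out
    }

    fn main() {
        let mut b = NameBindings::default();
        b.type_def = Some((Def(1), Privacy::Public));
        b.value_def = Some((Def(2), Privacy::Private));
        assert_eq!(exported_defs(&b).len(), 1);
        println!("{:?}", exported_defs(&b));
    }

The same pair of lookups drives the name-definition search above, where an Xray lookup is additionally allowed to see non-Public defs.
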
diff --git a/src/rustc/middle/trans/alt.rs b/src/rustc/middle/trans/alt.rs
index 50ea80a134c..d760bc34907 100644
--- a/src/rustc/middle/trans/alt.rs
+++ b/src/rustc/middle/trans/alt.rs
@@ -99,7 +99,6 @@
  *
  */
 
-use driver::session::session;
 use lib::llvm::llvm;
 use lib::llvm::{ValueRef, BasicBlockRef};
 use pat_util::*;
diff --git a/src/rustc/middle/trans/base.rs b/src/rustc/middle/trans/base.rs
index 93e8435d3e9..4c9a006007e 100644
--- a/src/rustc/middle/trans/base.rs
+++ b/src/rustc/middle/trans/base.rs
@@ -17,7 +17,7 @@ use libc::{c_uint, c_ulonglong};
 use std::{map, time, list};
 use std::map::HashMap;
 use driver::session;
-use session::session;
+use session::Session;
 use syntax::attr;
 use back::{link, abi, upcall};
 use syntax::{ast, ast_util, codemap, ast_map};
@@ -2377,7 +2377,7 @@ fn create_module_map(ccx: @crate_ctxt) -> ValueRef {
 }
 
 
-fn decl_crate_map(sess: session::session, mapmeta: link_meta,
+fn decl_crate_map(sess: session::Session, mapmeta: link_meta,
                   llmod: ModuleRef) -> ValueRef {
     let targ_cfg = sess.targ_cfg;
     let int_type = T_int(targ_cfg);
@@ -2482,7 +2482,7 @@ fn write_abi_version(ccx: @crate_ctxt) {
                      false);
 }
 
-fn trans_crate(sess: session::session,
+fn trans_crate(sess: session::Session,
                crate: @ast::crate,
                tcx: ty::ctxt,
                output: &Path,
diff --git a/src/rustc/middle/trans/build.rs b/src/rustc/middle/trans/build.rs
index 69de8a2cca3..dfcc66adc3a 100644
--- a/src/rustc/middle/trans/build.rs
+++ b/src/rustc/middle/trans/build.rs
@@ -6,7 +6,6 @@ use codemap::span;
 use lib::llvm::{ValueRef, TypeRef, BasicBlockRef, BuilderRef, ModuleRef};
 use lib::llvm::{Opcode, IntPredicate, RealPredicate, True, False,
         CallConv, TypeKind, AtomicBinOp, AtomicOrdering};
-use driver::session::session;
 use common::*;
 
 fn B(cx: block) -> BuilderRef {
diff --git a/src/rustc/middle/trans/common.rs b/src/rustc/middle/trans/common.rs
index 0fa22dd65ba..931e82d5be9 100644
--- a/src/rustc/middle/trans/common.rs
+++ b/src/rustc/middle/trans/common.rs
@@ -8,7 +8,7 @@ use vec::raw::to_ptr;
 use std::map::{HashMap,Set};
 use syntax::{ast, ast_map};
 use driver::session;
-use session::session;
+use session::Session;
 use middle::ty;
 use back::{link, abi, upcall};
 use syntax::codemap::span;
@@ -110,7 +110,7 @@ fn BuilderRef_res(B: BuilderRef) -> BuilderRef_res {
 
 // Crate context.  Every crate we compile has one of these.
 type crate_ctxt = {
-     sess: session::session,
+     sess: session::Session,
      llmod: ModuleRef,
      td: target_data,
      tn: type_names,
@@ -605,7 +605,7 @@ fn block_parent(cx: block) -> block {
 impl block {
     pure fn ccx() -> @crate_ctxt { self.fcx.ccx }
     pure fn tcx() -> ty::ctxt { self.fcx.ccx.tcx }
-    pure fn sess() -> session { self.fcx.ccx.sess }
+    pure fn sess() -> Session { self.fcx.ccx.sess }
 
     fn node_id_to_str(id: ast::node_id) -> ~str {
         ast_map::node_id_to_str(self.tcx().items, id, self.sess().intr())
@@ -1191,7 +1191,7 @@ fn align_to(cx: block, off: ValueRef, align: ValueRef) -> ValueRef {
     return build::And(cx, bumped, build::Not(cx, mask));
 }
 
-fn path_str(sess: session::session, p: path) -> ~str {
+fn path_str(sess: session::Session, p: path) -> ~str {
     let mut r = ~"", first = true;
     for vec::each(p) |e| {
         match *e {
diff --git a/src/rustc/middle/trans/debuginfo.rs b/src/rustc/middle/trans/debuginfo.rs
index 068ec49d6c7..2db0dd59cf9 100644
--- a/src/rustc/middle/trans/debuginfo.rs
+++ b/src/rustc/middle/trans/debuginfo.rs
@@ -9,7 +9,7 @@ use middle::ty;
 use syntax::{ast, codemap, ast_util, ast_map};
 use syntax::parse::token::ident_interner;
 use codemap::span;
-use ast::ty;
+use ast::Ty;
 use pat_util::*;
 use util::ppaux::ty_to_str;
 use driver::session::session;
@@ -229,7 +229,7 @@ fn create_file(cx: @crate_ctxt, full_path: ~str) -> @metadata<file_md> {
     return mdval;
 }
 
-fn line_from_span(cm: codemap::codemap, sp: span) -> uint {
+fn line_from_span(cm: codemap::CodeMap, sp: span) -> uint {
     codemap::lookup_char_pos(cm, sp.lo).line
 }
 
@@ -469,7 +469,7 @@ fn create_composite_type(type_tag: int, name: ~str, file: ValueRef, line: int,
 }
 
 fn create_vec(cx: @crate_ctxt, vec_t: ty::t, elem_t: ty::t,
-              vec_ty_span: codemap::span, elem_ty: @ast::ty)
+              vec_ty_span: codemap::span, elem_ty: @ast::Ty)
     -> @metadata<tydesc_md> {
     let fname = filename_from_span(cx, vec_ty_span);
     let file_node = create_file(cx, fname);
@@ -492,7 +492,7 @@ fn create_vec(cx: @crate_ctxt, vec_t: ty::t, elem_t: ty::t,
     return @{node: llnode, data: {hash: ty::type_id(vec_t)}};
 }
 
-fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::ty)
+fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::Ty)
     -> @metadata<tydesc_md> {
     /*let cache = get_cache(cx);
     match cached_metadata::<@metadata<tydesc_md>>(
diff --git a/src/rustc/middle/trans/foreign.rs b/src/rustc/middle/trans/foreign.rs
index 5a6260ae270..8a03884f415 100644
--- a/src/rustc/middle/trans/foreign.rs
+++ b/src/rustc/middle/trans/foreign.rs
@@ -1,7 +1,7 @@
 // The classification code for the x86_64 ABI is taken from the clay language
 // https://github.com/jckarter/clay/blob/master/compiler/src/externals.cpp
 
-use driver::session::{session, arch_x86_64};
+use driver::session::arch_x86_64;
 use syntax::codemap::span;
 use libc::c_uint;
 use syntax::{attr, ast_map};
diff --git a/src/rustc/middle/trans/reachable.rs b/src/rustc/middle/trans/reachable.rs
index bfb8de76a6c..a99ef96b254 100644
--- a/src/rustc/middle/trans/reachable.rs
+++ b/src/rustc/middle/trans/reachable.rs
@@ -128,7 +128,7 @@ fn mk_ty_visitor() -> visit::vt<ctx> {
     visit::mk_vt(@{visit_ty: traverse_ty, ..*visit::default_visitor()})
 }
 
-fn traverse_ty(ty: @ty, cx: ctx, v: visit::vt<ctx>) {
+fn traverse_ty(ty: @Ty, cx: ctx, v: visit::vt<ctx>) {
     if cx.rmap.contains_key(ty.id) { return; }
     cx.rmap.insert(ty.id, ());
 
diff --git a/src/rustc/middle/trans/reflect.rs b/src/rustc/middle/trans/reflect.rs
index c105caecaeb..18a25888bb4 100644
--- a/src/rustc/middle/trans/reflect.rs
+++ b/src/rustc/middle/trans/reflect.rs
@@ -1,5 +1,4 @@
 use std::map::HashMap;
-use driver::session::session;
 use lib::llvm::{TypeRef, ValueRef};
 use syntax::ast;
 use back::abi;
diff --git a/src/rustc/middle/trans/tvec.rs b/src/rustc/middle/trans/tvec.rs
index b78314a6747..149c6ea532d 100644
--- a/src/rustc/middle/trans/tvec.rs
+++ b/src/rustc/middle/trans/tvec.rs
@@ -1,5 +1,4 @@
 use syntax::ast;
-use driver::session::session;
 use lib::llvm::{ValueRef, TypeRef};
 use back::abi;
 use syntax::codemap::span;
diff --git a/src/rustc/middle/trans/type_use.rs b/src/rustc/middle/trans/type_use.rs
index 8ccc8a28de3..8b2efacd4d1 100644
--- a/src/rustc/middle/trans/type_use.rs
+++ b/src/rustc/middle/trans/type_use.rs
@@ -20,7 +20,6 @@
 use std::map::HashMap;
 use std::list;
 use std::list::{List, Cons, Nil};
-use driver::session::session;
 use metadata::csearch;
 use syntax::ast::*, syntax::ast_util, syntax::visit;
 use syntax::ast_map;
diff --git a/src/rustc/middle/ty.rs b/src/rustc/middle/ty.rs
index a0ca46ee017..84510c7161e 100644
--- a/src/rustc/middle/ty.rs
+++ b/src/rustc/middle/ty.rs
@@ -5,7 +5,7 @@ use std::{map, smallintmap};
 use result::Result;
 use std::map::HashMap;
 use driver::session;
-use session::session;
+use session::Session;
 use syntax::{ast, ast_map};
 use syntax::ast_util;
 use syntax::ast_util::{is_local, local_def};
@@ -103,7 +103,7 @@ export ty_infer, mk_infer, type_is_ty_var, mk_var, mk_int_var;
 export InferTy, TyVar, IntVar;
 export ty_self, mk_self, type_has_self;
 export ty_class;
-export region, bound_region, encl_region;
+export Region, bound_region, encl_region;
 export re_bound, re_free, re_scope, re_static, re_var;
 export br_self, br_anon, br_named, br_cap_avoid;
 export get, type_has_params, type_needs_infer, type_has_regions;
@@ -114,7 +114,7 @@ export ty_var_id;
 export ty_to_def_id;
 export ty_fn_args;
 export ty_region;
-export kind, kind_implicitly_copyable, kind_send_copy, kind_copyable;
+export Kind, kind_implicitly_copyable, kind_send_copy, kind_copyable;
 export kind_noncopyable, kind_const;
 export kind_can_be_copied, kind_can_be_sent, kind_can_be_implicitly_copied;
 export kind_is_safe_for_default_mode;
@@ -219,7 +219,7 @@ enum vstore {
     vstore_fixed(uint),
     vstore_uniq,
     vstore_box,
-    vstore_slice(region)
+    vstore_slice(Region)
 }
 
 type field_ty = {
@@ -302,7 +302,7 @@ type AutoAdjustment = {
 #[auto_deserialize]
 type AutoRef = {
     kind: AutoRefKind,
-    region: region,
+    region: Region,
     mutbl: ast::mutability
 };
 
@@ -327,8 +327,8 @@ type ctxt =
       mut next_id: uint,
       vecs_implicitly_copyable: bool,
       legacy_modes: bool,
-      cstore: metadata::cstore::cstore,
-      sess: session::session,
+      cstore: metadata::cstore::CStore,
+      sess: session::Session,
       def_map: resolve::DefMap,
 
       region_map: middle::region::region_map,
@@ -354,8 +354,8 @@ type ctxt =
       short_names_cache: HashMap<t, @~str>,
       needs_drop_cache: HashMap<t, bool>,
       needs_unwind_cleanup_cache: HashMap<t, bool>,
-      kind_cache: HashMap<t, kind>,
-      ast_ty_to_ty_cache: HashMap<@ast::ty, ast_ty_to_ty_cache_entry>,
+      kind_cache: HashMap<t, Kind>,
+      ast_ty_to_ty_cache: HashMap<@ast::Ty, ast_ty_to_ty_cache_entry>,
       enum_var_cache: HashMap<def_id, @~[variant_info]>,
       trait_method_cache: HashMap<def_id, @~[method]>,
       ty_param_bounds: HashMap<ast::node_id, param_bounds>,
@@ -519,7 +519,7 @@ impl param_ty : to_bytes::IterBytes {
 /// Representation of regions:
 #[auto_serialize]
 #[auto_deserialize]
-enum region {
+enum Region {
     /// Bound regions are found (primarily) in function types.  They indicate
     /// region parameters that have yet to be replaced with actual regions
     /// (analogous to type parameters, except that due to the monomorphic
@@ -570,7 +570,7 @@ enum bound_region {
     br_cap_avoid(ast::node_id, @bound_region),
 }
 
-type opt_region = Option<region>;
+type opt_region = Option<Region>;
 
 /**
  * The type substs represents the kinds of things that can be substituted to
@@ -610,7 +610,7 @@ enum sty {
     ty_uniq(mt),
     ty_evec(mt, vstore),
     ty_ptr(mt),
-    ty_rptr(region, mt),
+    ty_rptr(Region, mt),
     ty_rec(~[field]),
     ty_fn(FnTy),
     ty_trait(def_id, substs, vstore),
@@ -656,9 +656,9 @@ enum type_err {
     terr_record_fields(expected_found<ident>),
     terr_arg_count,
     terr_mode_mismatch(expected_found<mode>),
-    terr_regions_does_not_outlive(region, region),
-    terr_regions_not_same(region, region),
-    terr_regions_no_overlap(region, region),
+    terr_regions_does_not_outlive(Region, Region),
+    terr_regions_not_same(Region, Region),
+    terr_regions_no_overlap(Region, Region),
     terr_vstores_differ(terr_vstore_kind, expected_found<vstore>),
     terr_in_field(@type_err, ast::ident),
     terr_sorts(expected_found<t>),
@@ -783,7 +783,7 @@ impl FnVid : to_bytes::IterBytes {
     }
 }
 
-fn param_bounds_to_kind(bounds: param_bounds) -> kind {
+fn param_bounds_to_kind(bounds: param_bounds) -> Kind {
     let mut kind = kind_noncopyable();
     for vec::each(*bounds) |bound| {
         match *bound {
@@ -834,7 +834,7 @@ fn new_ty_hash<V: Copy>() -> map::HashMap<t, V> {
     map::HashMap()
 }
 
-fn mk_ctxt(s: session::session,
+fn mk_ctxt(s: session::Session,
            dm: resolve::DefMap,
            amap: ast_map::map,
            freevars: freevars::freevar_map,
@@ -904,7 +904,7 @@ fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option<ast::def_id>) -> t {
       _ => ()
     }
     let mut flags = 0u;
-    fn rflags(r: region) -> uint {
+    fn rflags(r: Region) -> uint {
         (has_regions as uint) | {
             match r {
               ty::re_var(_) => needs_infer as uint,
@@ -1018,12 +1018,12 @@ fn mk_imm_uniq(cx: ctxt, ty: t) -> t { mk_uniq(cx, {ty: ty,
 
 fn mk_ptr(cx: ctxt, tm: mt) -> t { mk_t(cx, ty_ptr(tm)) }
 
-fn mk_rptr(cx: ctxt, r: region, tm: mt) -> t { mk_t(cx, ty_rptr(r, tm)) }
+fn mk_rptr(cx: ctxt, r: Region, tm: mt) -> t { mk_t(cx, ty_rptr(r, tm)) }
 
-fn mk_mut_rptr(cx: ctxt, r: region, ty: t) -> t {
+fn mk_mut_rptr(cx: ctxt, r: Region, ty: t) -> t {
     mk_rptr(cx, r, {ty: ty, mutbl: ast::m_mutbl})
 }
-fn mk_imm_rptr(cx: ctxt, r: region, ty: t) -> t {
+fn mk_imm_rptr(cx: ctxt, r: Region, ty: t) -> t {
     mk_rptr(cx, r, {ty: ty, mutbl: ast::m_imm})
 }
 
@@ -1148,7 +1148,7 @@ fn default_arg_mode_for_ty(tcx: ctxt, ty: ty::t) -> ast::rmode {
 
 // Returns the narrowest lifetime enclosing the evaluation of the expression
 // with id `id`.
-fn encl_region(cx: ctxt, id: ast::node_id) -> ty::region {
+fn encl_region(cx: ctxt, id: ast::node_id) -> ty::Region {
     match cx.region_map.find(id) {
       Some(encl_scope) => ty::re_scope(encl_scope),
       None => ty::re_static
@@ -1265,7 +1265,7 @@ fn fold_ty(cx: ctxt, t0: t, fldop: fn(t) -> t) -> t {
 fn walk_regions_and_ty(
     cx: ctxt,
     ty: t,
-    walkr: fn(r: region),
+    walkr: fn(r: Region),
     walkt: fn(t: t) -> bool) {
 
     if (walkt(ty)) {
@@ -1280,13 +1280,13 @@ fn walk_regions_and_ty(
 fn fold_regions_and_ty(
     cx: ctxt,
     ty: t,
-    fldr: fn(r: region) -> region,
+    fldr: fn(r: Region) -> Region,
     fldfnt: fn(t: t) -> t,
     fldt: fn(t: t) -> t) -> t {
 
     fn fold_substs(
         substs: &substs,
-        fldr: fn(r: region) -> region,
+        fldr: fn(r: Region) -> Region,
         fldt: fn(t: t) -> t) -> substs {
 
         {self_r: substs.self_r.map(|r| fldr(*r)),
@@ -1351,10 +1351,10 @@ fn fold_regions_and_ty(
 fn fold_regions(
     cx: ctxt,
     ty: t,
-    fldr: fn(r: region, in_fn: bool) -> region) -> t {
+    fldr: fn(r: Region, in_fn: bool) -> Region) -> t {
 
     fn do_fold(cx: ctxt, ty: t, in_fn: bool,
-               fldr: fn(region, bool) -> region) -> t {
+               fldr: fn(Region, bool) -> Region) -> t {
         if !type_has_regions(ty) { return ty; }
         fold_regions_and_ty(
             cx, ty,
@@ -1365,9 +1365,9 @@ fn fold_regions(
     do_fold(cx, ty, false, fldr)
 }
 
-fn fold_region(cx: ctxt, t0: t, fldop: fn(region, bool) -> region) -> t {
+fn fold_region(cx: ctxt, t0: t, fldop: fn(Region, bool) -> Region) -> t {
     fn do_fold(cx: ctxt, t0: t, under_r: bool,
-               fldop: fn(region, bool) -> region) -> t {
+               fldop: fn(Region, bool) -> Region) -> t {
         let tb = get(t0);
         if !tbox_has_flag(tb, has_regions) { return t0; }
         match tb.sty {
@@ -1777,7 +1777,7 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t,
     return needs_unwind_cleanup;
 }
 
-enum kind { kind_(u32) }
+enum Kind { kind_(u32) }
 
 /// can be copied (implicitly or explicitly)
 const KIND_MASK_COPY         : u32 = 0b000000000000000000000000001_u32;
@@ -1797,92 +1797,92 @@ const KIND_MASK_IMPLICIT     : u32 = 0b000000000000000000000010000_u32;
 /// safe for default mode (subset of KIND_MASK_IMPLICIT)
 const KIND_MASK_DEFAULT_MODE : u32 = 0b000000000000000000000100000_u32;
 
-fn kind_noncopyable() -> kind {
+fn kind_noncopyable() -> Kind {
     kind_(0u32)
 }
 
-fn kind_copyable() -> kind {
+fn kind_copyable() -> Kind {
     kind_(KIND_MASK_COPY)
 }
 
-fn kind_implicitly_copyable() -> kind {
+fn kind_implicitly_copyable() -> Kind {
     kind_(KIND_MASK_IMPLICIT | KIND_MASK_COPY)
 }
 
-fn kind_safe_for_default_mode() -> kind {
+fn kind_safe_for_default_mode() -> Kind {
     // similar to implicit copy, but always includes vectors and strings
     kind_(KIND_MASK_DEFAULT_MODE | KIND_MASK_IMPLICIT | KIND_MASK_COPY)
 }
 
-fn kind_implicitly_sendable() -> kind {
+fn kind_implicitly_sendable() -> Kind {
     kind_(KIND_MASK_IMPLICIT | KIND_MASK_COPY | KIND_MASK_SEND)
 }
 
-fn kind_safe_for_default_mode_send() -> kind {
+fn kind_safe_for_default_mode_send() -> Kind {
     // similar to implicit copy, but always includes vectors and strings
     kind_(KIND_MASK_DEFAULT_MODE | KIND_MASK_IMPLICIT |
           KIND_MASK_COPY | KIND_MASK_SEND)
 }
 
 
-fn kind_send_copy() -> kind {
+fn kind_send_copy() -> Kind {
     kind_(KIND_MASK_COPY | KIND_MASK_SEND)
 }
 
-fn kind_send_only() -> kind {
+fn kind_send_only() -> Kind {
     kind_(KIND_MASK_SEND)
 }
 
-fn kind_const() -> kind {
+fn kind_const() -> Kind {
     kind_(KIND_MASK_CONST)
 }
 
-fn kind_owned() -> kind {
+fn kind_owned() -> Kind {
     kind_(KIND_MASK_OWNED)
 }
 
-fn kind_top() -> kind {
+fn kind_top() -> Kind {
     kind_(0xffffffffu32)
 }
 
-fn remove_const(k: kind) -> kind {
+fn remove_const(k: Kind) -> Kind {
     k - kind_const()
 }
 
-fn remove_implicit(k: kind) -> kind {
+fn remove_implicit(k: Kind) -> Kind {
     k - kind_(KIND_MASK_IMPLICIT | KIND_MASK_DEFAULT_MODE)
 }
 
-fn remove_send(k: kind) -> kind {
+fn remove_send(k: Kind) -> Kind {
     k - kind_(KIND_MASK_SEND)
 }
 
-fn remove_owned_send(k: kind) -> kind {
+fn remove_owned_send(k: Kind) -> Kind {
     k - kind_(KIND_MASK_OWNED) - kind_(KIND_MASK_SEND)
 }
 
-fn remove_copyable(k: kind) -> kind {
+fn remove_copyable(k: Kind) -> Kind {
     k - kind_(KIND_MASK_COPY | KIND_MASK_DEFAULT_MODE)
 }
 
-impl kind : ops::BitAnd<kind,kind> {
-    pure fn bitand(other: &kind) -> kind {
+impl Kind : ops::BitAnd<Kind,Kind> {
+    pure fn bitand(other: &Kind) -> Kind {
         unsafe {
             lower_kind(self, (*other))
         }
     }
 }
 
-impl kind : ops::BitOr<kind,kind> {
-    pure fn bitor(other: &kind) -> kind {
+impl Kind : ops::BitOr<Kind,Kind> {
+    pure fn bitor(other: &Kind) -> Kind {
         unsafe {
             raise_kind(self, (*other))
         }
     }
 }
 
-impl kind : ops::Sub<kind,kind> {
-    pure fn sub(other: &kind) -> kind {
+impl Kind : ops::Sub<Kind,Kind> {
+    pure fn sub(other: &Kind) -> Kind {
         unsafe {
             kind_(*self & !*(*other))
         }
@@ -1892,27 +1892,27 @@ impl kind : ops::Sub<kind,kind> {
 // Using these query functions is preferable to direct comparison or matching
 // against the kind constants, as we may modify the kind hierarchy in the
 // future.
-pure fn kind_can_be_implicitly_copied(k: kind) -> bool {
+pure fn kind_can_be_implicitly_copied(k: Kind) -> bool {
     *k & KIND_MASK_IMPLICIT == KIND_MASK_IMPLICIT
 }
 
-pure fn kind_is_safe_for_default_mode(k: kind) -> bool {
+pure fn kind_is_safe_for_default_mode(k: Kind) -> bool {
     *k & KIND_MASK_DEFAULT_MODE == KIND_MASK_DEFAULT_MODE
 }
 
-pure fn kind_can_be_copied(k: kind) -> bool {
+pure fn kind_can_be_copied(k: Kind) -> bool {
     *k & KIND_MASK_COPY == KIND_MASK_COPY
 }
 
-pure fn kind_can_be_sent(k: kind) -> bool {
+pure fn kind_can_be_sent(k: Kind) -> bool {
     *k & KIND_MASK_SEND == KIND_MASK_SEND
 }
 
-pure fn kind_is_owned(k: kind) -> bool {
+pure fn kind_is_owned(k: Kind) -> bool {
     *k & KIND_MASK_OWNED == KIND_MASK_OWNED
 }
 
-fn meta_kind(p: FnMeta) -> kind {
+fn meta_kind(p: FnMeta) -> Kind {
     match p.proto { // XXX consider the kind bounds!
       proto_vstore(vstore_slice(_)) =>
         kind_noncopyable() | kind_(KIND_MASK_DEFAULT_MODE),
@@ -1927,15 +1927,15 @@ fn meta_kind(p: FnMeta) -> kind {
     }
 }
 
-fn kind_lteq(a: kind, b: kind) -> bool {
+fn kind_lteq(a: Kind, b: Kind) -> bool {
     *a & *b == *a
 }
 
-fn lower_kind(a: kind, b: kind) -> kind {
+fn lower_kind(a: Kind, b: Kind) -> Kind {
     kind_(*a & *b)
 }
 
-fn raise_kind(a: kind, b: kind) -> kind {
+fn raise_kind(a: Kind, b: Kind) -> Kind {
     kind_(*a | *b)
 }
 
@@ -1960,7 +1960,7 @@ fn test_kinds() {
 // with the given mutability can have.
 // This is used to prevent objects containing mutable state from being
 // implicitly copied and to compute whether things have const kind.
-fn mutability_kind(m: mutability) -> kind {
+fn mutability_kind(m: mutability) -> Kind {
     match (m) {
       m_mutbl => remove_const(remove_implicit(kind_top())),
       m_const => remove_implicit(kind_top()),
@@ -1968,11 +1968,11 @@ fn mutability_kind(m: mutability) -> kind {
     }
 }
 
-fn mutable_type_kind(cx: ctxt, ty: mt) -> kind {
+fn mutable_type_kind(cx: ctxt, ty: mt) -> Kind {
     lower_kind(mutability_kind(ty.mutbl), type_kind(cx, ty.ty))
 }
 
-fn type_kind(cx: ctxt, ty: t) -> kind {
+fn type_kind(cx: ctxt, ty: t) -> Kind {
     match cx.kind_cache.find(ty) {
       Some(result) => return result,
       None => {/* fall through */ }
@@ -2550,7 +2550,7 @@ impl bound_region : to_bytes::IterBytes {
     }
 }
 
-impl region : to_bytes::IterBytes {
+impl Region : to_bytes::IterBytes {
     pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
         match self {
           re_bound(ref br) =>
@@ -2763,7 +2763,7 @@ fn is_fn_ty(fty: t) -> bool {
     }
 }
 
-fn ty_region(ty: t) -> region {
+fn ty_region(ty: t) -> Region {
     match get(ty).sty {
       ty_rptr(r, _) => r,
       s => fail fmt!("ty_region() invoked on non-rptr: %?", s)
@@ -4084,8 +4084,8 @@ impl RegionVid : cmp::Eq {
     pure fn ne(other: &RegionVid) -> bool { *self != *(*other) }
 }
 
-impl region : cmp::Eq {
-    pure fn eq(other: &region) -> bool {
+impl Region : cmp::Eq {
+    pure fn eq(other: &Region) -> bool {
         match self {
             re_bound(e0a) => {
                 match (*other) {
@@ -4119,7 +4119,7 @@ impl region : cmp::Eq {
             }
         }
     }
-    pure fn ne(other: &region) -> bool { !self.eq(other) }
+    pure fn ne(other: &Region) -> bool { !self.eq(other) }
 }
 
 impl bound_region : cmp::Eq {
@@ -4367,9 +4367,9 @@ impl param_bound : cmp::Eq {
     pure fn ne(other: &param_bound) -> bool { !self.eq(other) }
 }
 
-impl kind : cmp::Eq {
-    pure fn eq(other: &kind) -> bool { *self == *(*other) }
-    pure fn ne(other: &kind) -> bool { *self != *(*other) }
+impl Kind : cmp::Eq {
+    pure fn eq(other: &Kind) -> bool { *self == *(*other) }
+    pure fn ne(other: &Kind) -> bool { *self != *(*other) }
 }
 
 
diff --git a/src/rustc/middle/typeck.rs b/src/rustc/middle/typeck.rs
index 077d34700b8..8d10343d78e 100644
--- a/src/rustc/middle/typeck.rs
+++ b/src/rustc/middle/typeck.rs
@@ -46,7 +46,6 @@ use syntax::ast_map::node_id_to_str;
 use syntax::ast_util::{local_def, respan, split_trait_methods};
 use syntax::visit;
 use metadata::csearch;
-use driver::session::session;
 use util::common::may_break;
 use syntax::codemap::span;
 use pat_util::{pat_is_variant, pat_id_map, PatIdMap};
diff --git a/src/rustc/middle/typeck/astconv.rs b/src/rustc/middle/typeck/astconv.rs
index 389c1adb016..b8ff637f7dd 100644
--- a/src/rustc/middle/typeck/astconv.rs
+++ b/src/rustc/middle/typeck/astconv.rs
@@ -58,7 +58,7 @@ trait ast_conv {
 
 fn get_region_reporting_err(tcx: ty::ctxt,
                             span: span,
-                            res: Result<ty::region, ~str>) -> ty::region {
+                            res: Result<ty::Region, ~str>) -> ty::Region {
 
     match res {
       result::Ok(r) => r,
@@ -70,7 +70,7 @@ fn get_region_reporting_err(tcx: ty::ctxt,
 }
 
 fn ast_region_to_region<AC: ast_conv, RS: region_scope Copy Owned>(
-    self: AC, rscope: RS, span: span, a_r: @ast::region) -> ty::region {
+    self: AC, rscope: RS, span: span, a_r: @ast::region) -> ty::Region {
 
     let res = match a_r.node {
         ast::re_static => Ok(ty::re_static),
@@ -155,7 +155,7 @@ const NO_TPS: uint = 2u;
 // internal notion of a type. `getter` is a function that returns the type
 // corresponding to a definition ID:
 fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Owned>(
-    self: AC, rscope: RS, &&ast_ty: @ast::ty) -> ty::t {
+    self: AC, rscope: RS, &&ast_ty: @ast::Ty) -> ty::t {
 
     fn ast_mt_to_mt<AC: ast_conv, RS: region_scope Copy Owned>(
         self: AC, rscope: RS, mt: ast::mt) -> ty::mt {
diff --git a/src/rustc/middle/typeck/check.rs b/src/rustc/middle/typeck/check.rs
index 9a7f2192cb1..6de249ebc68 100644
--- a/src/rustc/middle/typeck/check.rs
+++ b/src/rustc/middle/typeck/check.rs
@@ -166,20 +166,20 @@ fn blank_fn_ctxt(ccx: @crate_ctxt, rty: ty::t,
 }
 
 // a list of mapping from in-scope-region-names ("isr") to the
-// corresponding ty::region
-type isr_alist = @List<(ty::bound_region, ty::region)>;
+// corresponding ty::Region
+type isr_alist = @List<(ty::bound_region, ty::Region)>;
 
 trait get_and_find_region {
-    fn get(br: ty::bound_region) -> ty::region;
-    fn find(br: ty::bound_region) -> Option<ty::region>;
+    fn get(br: ty::bound_region) -> ty::Region;
+    fn find(br: ty::bound_region) -> Option<ty::Region>;
 }
 
 impl isr_alist: get_and_find_region {
-    fn get(br: ty::bound_region) -> ty::region {
+    fn get(br: ty::bound_region) -> ty::Region {
         self.find(br).get()
     }
 
-    fn find(br: ty::bound_region) -> Option<ty::region> {
+    fn find(br: ty::bound_region) -> Option<ty::Region> {
         for list::each(self) |isr| {
             let (isr_br, isr_r) = *isr;
             if isr_br == br { return Some(isr_r); }
@@ -563,7 +563,7 @@ impl @fn_ctxt: ast_conv {
 
 impl @fn_ctxt {
     fn search_in_scope_regions(br: ty::bound_region)
-        -> Result<ty::region, ~str>
+        -> Result<ty::Region, ~str>
     {
         match self.in_scope_regions.find(br) {
             Some(r) => result::Ok(r),
@@ -581,13 +581,13 @@ impl @fn_ctxt {
 }
 
 impl @fn_ctxt: region_scope {
-    fn anon_region(span: span) -> Result<ty::region, ~str> {
+    fn anon_region(span: span) -> Result<ty::Region, ~str> {
         result::Ok(self.infcx().next_region_var_nb(span))
     }
-    fn self_region(_span: span) -> Result<ty::region, ~str> {
+    fn self_region(_span: span) -> Result<ty::Region, ~str> {
         self.search_in_scope_regions(ty::br_self)
     }
-    fn named_region(_span: span, id: ast::ident) -> Result<ty::region, ~str> {
+    fn named_region(_span: span, id: ast::ident) -> Result<ty::Region, ~str> {
         self.search_in_scope_regions(ty::br_named(id))
     }
 }
@@ -600,7 +600,7 @@ impl @fn_ctxt {
              pprust::expr_to_str(expr, self.tcx().sess.intr()))
     }
 
-    fn block_region() -> ty::region {
+    fn block_region() -> ty::Region {
         ty::re_scope(self.region_lb)
     }
 
@@ -645,7 +645,7 @@ impl @fn_ctxt {
         self.write_ty(node_id, ty::mk_bot(self.tcx()));
     }
 
-    fn to_ty(ast_t: @ast::ty) -> ty::t {
+    fn to_ty(ast_t: @ast::Ty) -> ty::t {
         ast_ty_to_ty(self, self, ast_t)
     }
 
@@ -736,7 +736,7 @@ impl @fn_ctxt {
     }
 
     fn mk_subr(a_is_expected: bool, span: span,
-               sub: ty::region, sup: ty::region) -> Result<(), ty::type_err> {
+               sub: ty::Region, sup: ty::Region) -> Result<(), ty::type_err> {
         infer::mk_subr(self.infcx(), a_is_expected, span, sub, sup)
     }
 
@@ -760,8 +760,8 @@ impl @fn_ctxt {
 
     fn region_var_if_parameterized(rp: Option<ty::region_variance>,
                                    span: span,
-                                   lower_bound: ty::region)
-        -> Option<ty::region>
+                                   lower_bound: ty::Region)
+        -> Option<ty::Region>
     {
         rp.map(
             |_rp| self.infcx().next_region_var_with_lb(span, lower_bound))
@@ -1359,7 +1359,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
 
     // Check field access expressions
     fn check_field(fcx: @fn_ctxt, expr: @ast::expr, is_callee: bool,
-                   base: @ast::expr, field: ast::ident, tys: ~[@ast::ty])
+                   base: @ast::expr, field: ast::ident, tys: ~[@ast::Ty])
         -> bool
     {
         let tcx = fcx.ccx.tcx;
@@ -2443,7 +2443,7 @@ fn instantiate_path(fcx: @fn_ctxt,
                     tpt: ty_param_bounds_and_ty,
                     span: span,
                     node_id: ast::node_id,
-                    region_lb: ty::region) {
+                    region_lb: ty::Region) {
     let ty_param_count = vec::len(*tpt.bounds);
     let ty_substs_len = vec::len(pth.types);
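
The isr_alist touched above is an association list from in-scope bound-region names to regions; get and find are linear walks over it. A minimal sketch of that lookup in modern Rust, with stand-in BoundRegion and Region types rather than rustc's:

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum BoundRegion { BrSelf, BrAnon(u32) }

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Region { ReStatic, ReScope(u32) }

    type IsrAlist = Vec<(BoundRegion, Region)>;

    // Mirrors get_and_find_region::find: first matching entry wins.
    fn find(isr: &IsrAlist, br: BoundRegion) -> Option<Region> {
        isr.iter().copied().find(|&(isr_br, _)| isr_br == br).map(|(_, r)| r)
    }

    // Mirrors get_and_find_region::get: a lookup that is expected to succeed.
    fn get(isr: &IsrAlist, br: BoundRegion) -> Region {
        find(isr, br).expect("bound region not in scope")
    }

    fn main() {
        let isr: IsrAlist = vec![
            (BoundRegion::BrSelf, Region::ReScope(7)),
            (BoundRegion::BrAnon(0), Region::ReStatic),
        ];
        assert_eq!(get(&isr, BoundRegion::BrSelf), Region::ReScope(7));
        assert_eq!(find(&isr, BoundRegion::BrAnon(0)), Some(Region::ReStatic));
        assert_eq!(find(&isr, BoundRegion::BrAnon(1)), None);
        println!("ok");
    }
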
 
diff --git a/src/rustc/middle/typeck/check/alt.rs b/src/rustc/middle/typeck/check/alt.rs
index 24bcc2281fb..caace605198 100644
--- a/src/rustc/middle/typeck/check/alt.rs
+++ b/src/rustc/middle/typeck/check/alt.rs
@@ -112,8 +112,8 @@ fn check_legality_of_move_bindings(fcx: @fn_ctxt,
 type pat_ctxt = {
     fcx: @fn_ctxt,
     map: PatIdMap,
-    alt_region: ty::region,   // Region for the alt as a whole
-    block_region: ty::region, // Region for the block of the arm
+    alt_region: ty::Region,   // Region for the alt as a whole
+    block_region: ty::Region, // Region for the block of the arm
 };
 
 fn check_pat_variant(pcx: pat_ctxt, pat: @ast::pat, path: @ast::path,
diff --git a/src/rustc/middle/typeck/check/method.rs b/src/rustc/middle/typeck/check/method.rs
index 04be0047548..eaf1a45afa9 100644
--- a/src/rustc/middle/typeck/check/method.rs
+++ b/src/rustc/middle/typeck/check/method.rs
@@ -654,7 +654,7 @@ impl LookupContext {
         kind: AutoRefKind,
         autoderefs: uint,
         mutbls: &[ast::mutability],
-        mk_autoref_ty: &fn(ast::mutability, ty::region) -> ty::t)
+        mk_autoref_ty: &fn(ast::mutability, ty::Region) -> ty::t)
         -> Option<method_map_entry>
     {
         // This is hokey. We should have mutability inference as a
@@ -930,7 +930,7 @@ impl LookupContext {
 }
 
 fn transform_self_type_for_method(tcx: ty::ctxt,
-                                  self_region: Option<ty::region>,
+                                  self_region: Option<ty::Region>,
                                   impl_ty: ty::t,
                                   self_type: ast::self_ty_)
     -> ty::t
diff --git a/src/rustc/middle/typeck/check/regionck.rs b/src/rustc/middle/typeck/check/regionck.rs
index 0b258da5672..932cdd994da 100644
--- a/src/rustc/middle/typeck/check/regionck.rs
+++ b/src/rustc/middle/typeck/check/regionck.rs
@@ -32,7 +32,7 @@ use middle::ty::{vstore_uniq};
 enum rcx { rcx_({fcx: @fn_ctxt, mut errors_reported: uint}) }
 type rvt = visit::vt<@rcx>;
 
-fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::region {
+fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::Region {
     let tcx = fcx.tcx();
     match def {
         def_local(node_id, _) | def_arg(node_id, _) | def_self(node_id) |
@@ -335,7 +335,7 @@ fn constrain_auto_ref(
 
 fn constrain_free_variables(
     rcx: @rcx,
-    region: ty::region,
+    region: ty::Region,
     expr: @ast::expr)
 {
     /*!
@@ -373,7 +373,7 @@ fn constrain_free_variables(
 fn constrain_regions_in_type_of_node(
     rcx: @rcx,
     id: ast::node_id,
-    encl_region: ty::region,
+    encl_region: ty::Region,
     span: span) -> bool
 {
     let tcx = rcx.fcx.tcx();
@@ -395,7 +395,7 @@ fn constrain_regions_in_type_of_node(
 
 fn constrain_regions_in_type(
     rcx: @rcx,
-    encl_region: ty::region,
+    encl_region: ty::Region,
     span: span,
     ty: ty::t) -> bool
 {
@@ -417,9 +417,9 @@ fn constrain_regions_in_type(
     return (e == rcx.errors_reported);
 
     fn constrain_region(rcx: @rcx,
-                        encl_region: ty::region,
+                        encl_region: ty::Region,
                         span: span,
-                        region: ty::region) {
+                        region: ty::Region) {
         let tcx = rcx.fcx.ccx.tcx;
 
         debug!("constrain_region(encl_region=%?, region=%?)",
diff --git a/src/rustc/middle/typeck/check/regionmanip.rs b/src/rustc/middle/typeck/check/regionmanip.rs
index 4afb3ad78a6..806b234540c 100644
--- a/src/rustc/middle/typeck/check/regionmanip.rs
+++ b/src/rustc/middle/typeck/check/regionmanip.rs
@@ -10,7 +10,7 @@ fn replace_bound_regions_in_fn_ty(
     isr: isr_alist,
     self_info: Option<self_info>,
     fn_ty: &ty::FnTy,
-    mapf: fn(ty::bound_region) -> ty::region) ->
+    mapf: fn(ty::bound_region) -> ty::Region) ->
     {isr: isr_alist, self_info: Option<self_info>, fn_ty: ty::FnTy} {
 
     // Take self_info apart; the self_ty part is the only one we want
@@ -83,7 +83,7 @@ fn replace_bound_regions_in_fn_ty(
         tcx: ty::ctxt,
         isr: isr_alist,
         tys: ~[ty::t],
-        to_r: fn(ty::bound_region) -> ty::region) -> isr_alist {
+        to_r: fn(ty::bound_region) -> ty::Region) -> isr_alist {
 
         // Takes `isr` (described above), `to_r` (described above),
         // and `r`, a region.  If `r` is anything other than a bound
@@ -93,8 +93,8 @@ fn replace_bound_regions_in_fn_ty(
         // updated isr_alist that now contains a mapping from `r` to
         // the result of calling `to_r` on it.
         fn append_isr(isr: isr_alist,
-                      to_r: fn(ty::bound_region) -> ty::region,
-                      r: ty::region) -> isr_alist {
+                      to_r: fn(ty::bound_region) -> ty::Region,
+                      r: ty::Region) -> isr_alist {
             match r {
               ty::re_free(_, _) | ty::re_static | ty::re_scope(_) |
               ty::re_var(_) => {
diff --git a/src/rustc/middle/typeck/coherence.rs b/src/rustc/middle/typeck/coherence.rs
index 9a9a8dda6e4..189e7377d9c 100644
--- a/src/rustc/middle/typeck/coherence.rs
+++ b/src/rustc/middle/typeck/coherence.rs
@@ -6,7 +6,7 @@
 
 use metadata::csearch::{ProvidedTraitMethodInfo, each_path, get_impl_traits};
 use metadata::csearch::{get_impls_for_mod};
-use metadata::cstore::{cstore, iter_crate_data};
+use metadata::cstore::{CStore, iter_crate_data};
 use metadata::decoder::{dl_def, dl_field, dl_impl};
 use middle::resolve::{Impl, MethodInfo};
 use middle::ty::{ProvidedMethodSource, get, lookup_item_type, subst, t};
@@ -595,7 +595,7 @@ impl CoherenceChecker {
     fn create_impl_from_item(item: @item) -> @Impl {
         fn add_provided_methods(all_methods: &mut ~[@MethodInfo],
                                 all_provided_methods: ~[@ProvidedMethodInfo],
-                                sess: driver::session::session) {
+                                sess: driver::session::Session) {
             for all_provided_methods.each |provided_method| {
                 debug!(
                     "(creating impl) adding provided method `%s` to impl",
@@ -694,7 +694,7 @@ impl CoherenceChecker {
     // External crate handling
 
     fn add_impls_for_module(impls_seen: HashMap<def_id,()>,
-                            crate_store: cstore,
+                            crate_store: CStore,
                             module_def_id: def_id) {
 
         let implementations = get_impls_for_mod(crate_store,
diff --git a/src/rustc/middle/typeck/collect.rs b/src/rustc/middle/typeck/collect.rs
index 9e51225f172..a5390d8f293 100644
--- a/src/rustc/middle/typeck/collect.rs
+++ b/src/rustc/middle/typeck/collect.rs
@@ -76,7 +76,7 @@ fn collect_item_types(ccx: @crate_ctxt, crate: @ast::crate) {
 
 impl @crate_ctxt {
     fn to_ty<RS: region_scope Copy Owned>(
-        rs: RS, ast_ty: @ast::ty) -> ty::t {
+        rs: RS, ast_ty: @ast::Ty) -> ty::t {
 
         ast_ty_to_ty(self, rs, ast_ty)
     }
@@ -345,7 +345,7 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span,
 
     // Replaces bound references to the self region with `with_r`.
     fn replace_bound_self(tcx: ty::ctxt, ty: ty::t,
-                          with_r: ty::region) -> ty::t {
+                          with_r: ty::Region) -> ty::t {
         do ty::fold_regions(tcx, ty) |r, _in_fn| {
             if r == ty::re_bound(ty::br_self) {with_r} else {r}
         }
diff --git a/src/rustc/middle/typeck/infer.rs b/src/rustc/middle/typeck/infer.rs
index 96849bf918d..e0465b22c93 100644
--- a/src/rustc/middle/typeck/infer.rs
+++ b/src/rustc/middle/typeck/infer.rs
@@ -258,7 +258,6 @@ use util::ppaux::{ty_to_str, mt_to_str};
 use result::{Result, Ok, Err, map_vec, map_vec2, iter_vec2};
 use ty::{mk_fn, type_is_bot};
 use check::regionmanip::{replace_bound_regions_in_fn_ty};
-use driver::session::session;
 use util::common::{indent, indenter};
 use ast::{unsafe_fn, impure_fn, pure_fn, extern_fn};
 use ast::{m_const, m_imm, m_mutbl};
@@ -275,7 +274,7 @@ use unify::{vals_and_bindings, root};
 use integral::{int_ty_set, int_ty_set_all};
 use combine::{combine_fields, eq_tys};
 use assignment::Assign;
-use to_str::to_str;
+use to_str::ToStr;
 
 use sub::Sub;
 use lub::Lub;
@@ -385,7 +384,7 @@ fn can_mk_subty(cx: infer_ctxt, a: ty::t, b: ty::t) -> ures {
 }
 
 fn mk_subr(cx: infer_ctxt, a_is_expected: bool, span: span,
-           a: ty::region, b: ty::region) -> ures {
+           a: ty::Region, b: ty::Region) -> ures {
     debug!("mk_subr(%s <: %s)", a.to_str(cx), b.to_str(cx));
     do indent {
         do cx.commit {
@@ -431,8 +430,8 @@ fn resolve_type(cx: infer_ctxt, a: ty::t, modes: uint)
     resolver(cx, modes).resolve_type_chk(a)
 }
 
-fn resolve_region(cx: infer_ctxt, r: ty::region, modes: uint)
-    -> fres<ty::region> {
+fn resolve_region(cx: infer_ctxt, r: ty::Region, modes: uint)
+    -> fres<ty::Region> {
     resolver(cx, modes).resolve_region_chk(r)
 }
 
@@ -628,12 +627,12 @@ impl infer_ctxt {
         ty::mk_int_var(self.tcx, self.next_int_var_id())
     }
 
-    fn next_region_var_nb(span: span) -> ty::region {
+    fn next_region_var_nb(span: span) -> ty::Region {
         ty::re_var(self.region_vars.new_region_var(span))
     }
 
     fn next_region_var_with_lb(span: span,
-                               lb_region: ty::region) -> ty::region {
+                               lb_region: ty::Region) -> ty::Region {
         let region_var = self.next_region_var_nb(span);
 
         // add lb_region as a lower bound on the newly built variable
@@ -644,7 +643,7 @@ impl infer_ctxt {
         return region_var;
     }
 
-    fn next_region_var(span: span, scope_id: ast::node_id) -> ty::region {
+    fn next_region_var(span: span, scope_id: ast::node_id) -> ty::Region {
         self.next_region_var_with_lb(span, ty::re_scope(scope_id))
     }
 
diff --git a/src/rustc/middle/typeck/infer/assignment.rs b/src/rustc/middle/typeck/infer/assignment.rs
index 53731551df5..a5af58904dd 100644
--- a/src/rustc/middle/typeck/infer/assignment.rs
+++ b/src/rustc/middle/typeck/infer/assignment.rs
@@ -48,7 +48,7 @@
 // A.  But this upper-bound might be stricter than what is truly
 // needed.
 
-use to_str::to_str;
+use to_str::ToStr;
 use combine::combine_fields;
 
 fn to_ares(+c: cres<ty::t>) -> ares {
@@ -190,7 +190,7 @@ priv impl Assign {
                   a: ty::t,
                   nr_b: ty::t,
                   m: ast::mutability,
-                  r_b: ty::region) -> ares {
+                  r_b: ty::Region) -> ares {
 
         debug!("try_assign(a=%s, nr_b=%s, m=%?, r_b=%s)",
                a.to_str(self.infcx),
diff --git a/src/rustc/middle/typeck/infer/combine.rs b/src/rustc/middle/typeck/infer/combine.rs
index bdda45c1168..342a2ce2b76 100644
--- a/src/rustc/middle/typeck/infer/combine.rs
+++ b/src/rustc/middle/typeck/infer/combine.rs
@@ -44,7 +44,7 @@
 // terms of error reporting, although we do not do that properly right
 // now.
 
-use to_str::to_str;
+use to_str::ToStr;
 use ty::{FnTyBase, FnMeta, FnSig};
 
 trait combine {
@@ -72,8 +72,8 @@ trait combine {
     fn protos(p1: ty::fn_proto, p2: ty::fn_proto) -> cres<ty::fn_proto>;
     fn ret_styles(r1: ret_style, r2: ret_style) -> cres<ret_style>;
     fn purities(a: purity, b: purity) -> cres<purity>;
-    fn contraregions(a: ty::region, b: ty::region) -> cres<ty::region>;
-    fn regions(a: ty::region, b: ty::region) -> cres<ty::region>;
+    fn contraregions(a: ty::Region, b: ty::Region) -> cres<ty::Region>;
+    fn regions(a: ty::Region, b: ty::Region) -> cres<ty::Region>;
     fn vstores(vk: ty::terr_vstore_kind,
                a: ty::vstore, b: ty::vstore) -> cres<ty::vstore>;
 }
@@ -103,7 +103,7 @@ fn eq_tys<C: combine>(self: &C, a: ty::t, b: ty::t) -> ures {
     }
 }
 
-fn eq_regions<C: combine>(self: &C, a: ty::region, b: ty::region) -> ures {
+fn eq_regions<C: combine>(self: &C, a: ty::Region, b: ty::Region) -> ures {
     debug!("eq_regions(%s, %s)",
            a.to_str(self.infcx()),
            b.to_str(self.infcx()));
@@ -127,8 +127,8 @@ fn eq_regions<C: combine>(self: &C, a: ty::region, b: ty::region) -> ures {
 
 fn eq_opt_regions<C:combine>(
     self: &C,
-    a: Option<ty::region>,
-    b: Option<ty::region>) -> cres<Option<ty::region>> {
+    a: Option<ty::Region>,
+    b: Option<ty::Region>) -> cres<Option<ty::Region>> {
 
     match (a, b) {
       (None, None) => {
@@ -160,9 +160,9 @@ fn super_substs<C:combine>(
     fn relate_region_param<C:combine>(
         self: &C,
         did: ast::def_id,
-        a: Option<ty::region>,
-        b: Option<ty::region>)
-        -> cres<Option<ty::region>>
+        a: Option<ty::Region>,
+        b: Option<ty::Region>)
+        -> cres<Option<ty::Region>>
     {
         let polyty = ty::lookup_item_type(self.infcx().tcx, did);
         match (polyty.region_param, a, b) {
diff --git a/src/rustc/middle/typeck/infer/glb.rs b/src/rustc/middle/typeck/infer/glb.rs
index a8676a63b88..77e753fa220 100644
--- a/src/rustc/middle/typeck/infer/glb.rs
+++ b/src/rustc/middle/typeck/infer/glb.rs
@@ -1,6 +1,6 @@
 use combine::*;
 use lattice::*;
-use to_str::to_str;
+use to_str::ToStr;
 
 enum Glb = combine_fields;  // "greatest lower bound" (common subtype)
 
@@ -109,7 +109,7 @@ impl Glb: combine {
         }
     }
 
-    fn regions(a: ty::region, b: ty::region) -> cres<ty::region> {
+    fn regions(a: ty::Region, b: ty::Region) -> cres<ty::Region> {
         debug!("%s.regions(%?, %?)",
                self.tag(),
                a.to_str(self.infcx),
@@ -120,7 +120,7 @@ impl Glb: combine {
         }
     }
 
-    fn contraregions(a: ty::region, b: ty::region) -> cres<ty::region> {
+    fn contraregions(a: ty::Region, b: ty::Region) -> cres<ty::Region> {
         Lub(*self).regions(a, b)
     }
 
diff --git a/src/rustc/middle/typeck/infer/integral.rs b/src/rustc/middle/typeck/infer/integral.rs
index 168709596dc..1b23cb52b20 100644
--- a/src/rustc/middle/typeck/infer/integral.rs
+++ b/src/rustc/middle/typeck/infer/integral.rs
@@ -4,7 +4,7 @@ Code related to integral type inference.
 
 */
 
-use to_str::to_str;
+use to_str::ToStr;
 
 // Bitvector to represent sets of integral types
 enum int_ty_set = uint;
diff --git a/src/rustc/middle/typeck/infer/lattice.rs b/src/rustc/middle/typeck/infer/lattice.rs
index 04133cab9d7..699613e8ae6 100644
--- a/src/rustc/middle/typeck/infer/lattice.rs
+++ b/src/rustc/middle/typeck/infer/lattice.rs
@@ -1,6 +1,6 @@
 use combine::*;
 use unify::*;
-use to_str::to_str;
+use to_str::ToStr;
 
 // ______________________________________________________________________
 // Lattice operations on variables
diff --git a/src/rustc/middle/typeck/infer/lub.rs b/src/rustc/middle/typeck/infer/lub.rs
index 093da5caec8..dcff863a126 100644
--- a/src/rustc/middle/typeck/infer/lub.rs
+++ b/src/rustc/middle/typeck/infer/lub.rs
@@ -1,6 +1,6 @@
 use combine::*;
 use lattice::*;
-use to_str::to_str;
+use to_str::ToStr;
 
 enum Lub = combine_fields;  // "subtype", "subregion" etc
 
@@ -88,11 +88,11 @@ impl Lub: combine {
         }
     }
 
-    fn contraregions(a: ty::region, b: ty::region) -> cres<ty::region> {
+    fn contraregions(a: ty::Region, b: ty::Region) -> cres<ty::Region> {
         return Glb(*self).regions(a, b);
     }
 
-    fn regions(a: ty::region, b: ty::region) -> cres<ty::region> {
+    fn regions(a: ty::Region, b: ty::Region) -> cres<ty::Region> {
         debug!("%s.regions(%?, %?)",
                self.tag(),
                a.to_str(self.infcx),
diff --git a/src/rustc/middle/typeck/infer/region_var_bindings.rs b/src/rustc/middle/typeck/infer/region_var_bindings.rs
index 8bbdab74d23..86a872341f5 100644
--- a/src/rustc/middle/typeck/infer/region_var_bindings.rs
+++ b/src/rustc/middle/typeck/infer/region_var_bindings.rs
@@ -312,10 +312,10 @@ use std::map::HashMap;
 use std::cell::{Cell, empty_cell};
 use std::list::{List, Nil, Cons};
 
-use ty::{region, RegionVid};
 use region::is_subregion_of;
+use ty::{Region, RegionVid};
 use syntax::codemap;
-use to_str::to_str;
+use to_str::ToStr;
 use util::ppaux::note_and_explain_region;
 
 export RegionVarBindings;
@@ -325,8 +325,8 @@ export glb_regions;
 
 enum Constraint {
     ConstrainVarSubVar(RegionVid, RegionVid),
-    ConstrainRegSubVar(region, RegionVid),
-    ConstrainVarSubReg(RegionVid, region)
+    ConstrainRegSubVar(Region, RegionVid),
+    ConstrainVarSubReg(RegionVid, Region)
 }
 
 impl Constraint : cmp::Eq {
@@ -365,8 +365,8 @@ impl Constraint : to_bytes::IterBytes {
 }
 
 struct TwoRegions {
-    a: region,
-    b: region,
+    a: Region,
+    b: Region,
 }
 
 impl TwoRegions : cmp::Eq {
@@ -394,7 +394,7 @@ type CombineMap = HashMap<TwoRegions, RegionVid>;
 struct RegionVarBindings {
     tcx: ty::ctxt,
     var_spans: DVec<span>,
-    values: Cell<~[ty::region]>,
+    values: Cell<~[ty::Region]>,
     constraints: HashMap<Constraint, span>,
     lubs: CombineMap,
     glbs: CombineMap,
@@ -501,7 +501,7 @@ impl RegionVarBindings {
         }
     }
 
-    fn make_subregion(span: span, sub: region, sup: region) -> cres<()> {
+    fn make_subregion(span: span, sub: Region, sup: Region) -> cres<()> {
         // cannot add constraints once regions are resolved
         assert self.values.is_empty();
 
@@ -529,7 +529,7 @@ impl RegionVarBindings {
         }
     }
 
-    fn lub_regions(span: span, a: region, b: region) -> cres<region> {
+    fn lub_regions(span: span, a: Region, b: Region) -> cres<Region> {
         // cannot add constraints once regions are resolved
         assert self.values.is_empty();
 
@@ -551,7 +551,7 @@ impl RegionVarBindings {
         }
     }
 
-    fn glb_regions(span: span, a: region, b: region) -> cres<region> {
+    fn glb_regions(span: span, a: Region, b: Region) -> cres<Region> {
         // cannot add constraints once regions are resolved
         assert self.values.is_empty();
 
@@ -574,7 +574,7 @@ impl RegionVarBindings {
         }
     }
 
-    fn resolve_var(rid: RegionVid) -> ty::region {
+    fn resolve_var(rid: RegionVid) -> ty::Region {
         debug!("RegionVarBindings: resolve_var(%?=%u)", rid, *rid);
         if self.values.is_empty() {
             self.tcx.sess.span_bug(
@@ -586,9 +586,9 @@ impl RegionVarBindings {
         self.values.with_ref(|values| values[*rid])
     }
 
-    fn combine_vars(combines: CombineMap, a: region, b: region, span: span,
-                    relate: fn(old_r: region, new_r: region) -> cres<()>)
-        -> cres<region> {
+    fn combine_vars(combines: CombineMap, a: Region, b: Region, span: span,
+                    relate: fn(old_r: Region, new_r: Region) -> cres<()>)
+        -> cres<Region> {
 
         let vars = TwoRegions { a: a, b: b };
         match combines.find(vars) {
@@ -623,11 +623,11 @@ impl RegionVarBindings {
 }
 
 priv impl RegionVarBindings {
-    fn is_subregion_of(sub: region, sup: region) -> bool {
+    fn is_subregion_of(sub: Region, sup: Region) -> bool {
         is_subregion_of(self.tcx.region_map, sub, sup)
     }
 
-    fn lub_concrete_regions(+a: region, +b: region) -> region {
+    fn lub_concrete_regions(+a: Region, +b: Region) -> Region {
         match (a, b) {
           (ty::re_static, _) | (_, ty::re_static) => {
             ty::re_static // nothing lives longer than static
@@ -682,7 +682,7 @@ priv impl RegionVarBindings {
         }
     }
 
-    fn glb_concrete_regions(+a: region, +b: region) -> cres<region> {
+    fn glb_concrete_regions(+a: Region, +b: Region) -> cres<Region> {
         match (a, b) {
           (ty::re_static, r) | (r, ty::re_static) => {
             // static lives longer than everything else
@@ -771,7 +771,7 @@ impl Classification : cmp::Eq {
     pure fn ne(other: &Classification) -> bool { !self.eq(other) }
 }
 
-enum GraphNodeValue { NoValue, Value(region), ErrorValue }
+enum GraphNodeValue { NoValue, Value(Region), ErrorValue }
 
 struct GraphNode {
     span: span,
@@ -792,7 +792,7 @@ struct Graph {
 }
 
 struct SpannedRegion {
-    region: region,
+    region: Region,
     span: span,
 }
 
@@ -803,7 +803,7 @@ fn TwoRegionsMap() -> TwoRegionsMap {
 }
 
 impl RegionVarBindings {
-    fn infer_variable_values() -> ~[region] {
+    fn infer_variable_values() -> ~[Region] {
         let graph = self.construct_graph();
         self.expansion(&graph);
         self.contraction(&graph);
@@ -895,7 +895,7 @@ impl RegionVarBindings {
         }
     }
 
-    fn expand_node(a_region: region,
+    fn expand_node(a_region: Region,
                    b_vid: RegionVid,
                    b_node: &GraphNode) -> bool {
         debug!("expand_node(%?, %? == %?)",
@@ -955,7 +955,7 @@ impl RegionVarBindings {
 
     fn contract_node(a_vid: RegionVid,
                      a_node: &GraphNode,
-                     b_region: region) -> bool {
+                     b_region: Region) -> bool {
         debug!("contract_node(%? == %?/%?, %?)",
                a_vid, a_node.value, a_node.classification, b_region);
 
@@ -985,8 +985,8 @@ impl RegionVarBindings {
         fn check_node(self: &RegionVarBindings,
                       a_vid: RegionVid,
                       a_node: &GraphNode,
-                      a_region: region,
-                      b_region: region) -> bool {
+                      a_region: Region,
+                      b_region: Region) -> bool {
             if !self.is_subregion_of(a_region, b_region) {
                 debug!("Setting %? to ErrorValue: %? not subregion of %?",
                        a_vid, a_region, b_region);
@@ -998,8 +998,8 @@ impl RegionVarBindings {
         fn adjust_node(self: &RegionVarBindings,
                        a_vid: RegionVid,
                        a_node: &GraphNode,
-                       a_region: region,
-                       b_region: region) -> bool {
+                       a_region: Region,
+                       b_region: Region) -> bool {
             match self.glb_concrete_regions(a_region, b_region) {
               Ok(glb) => {
                 if glb == a_region {
@@ -1040,7 +1040,7 @@ impl RegionVarBindings {
         debug!("---- %s Complete after %u iteration(s)", tag, iteration);
     }
 
-    fn extract_regions_and_report_errors(graph: &Graph) -> ~[region] {
+    fn extract_regions_and_report_errors(graph: &Graph) -> ~[Region] {
         let dup_map = TwoRegionsMap();
         graph.nodes.mapi(|idx, node| {
             match node.value {
@@ -1073,8 +1073,8 @@ impl RegionVarBindings {
 
     // Used to suppress reporting the same basic error over and over
     fn is_reported(dup_map: TwoRegionsMap,
-                   r_a: region,
-                   r_b: region) -> bool {
+                   r_a: Region,
+                   r_b: Region) -> bool {
         let key = TwoRegions { a: r_a, b: r_b };
         !dup_map.insert(key, ())
     }
diff --git a/src/rustc/middle/typeck/infer/resolve.rs b/src/rustc/middle/typeck/infer/resolve.rs
index 2a851a5f7bb..5a55fbf9a5d 100644
--- a/src/rustc/middle/typeck/infer/resolve.rs
+++ b/src/rustc/middle/typeck/infer/resolve.rs
@@ -35,7 +35,7 @@
 // probably better off writing `resolve_all - resolve_ivar`.
 
 use integral::*;
-use to_str::to_str;
+use to_str::ToStr;
 
 const resolve_nested_tvar: uint = 0b00000001;
 const resolve_rvar: uint        = 0b00000010;
@@ -98,7 +98,7 @@ impl resolve_state {
         }
     }
 
-    fn resolve_region_chk(orig: ty::region) -> fres<ty::region> {
+    fn resolve_region_chk(orig: ty::Region) -> fres<ty::Region> {
         self.err = None;
         let resolved = indent(|| self.resolve_region(orig) );
         match self.err {
@@ -145,7 +145,7 @@ impl resolve_state {
         }
     }
 
-    fn resolve_region(orig: ty::region) -> ty::region {
+    fn resolve_region(orig: ty::Region) -> ty::Region {
         debug!("Resolve_region(%s)", orig.to_str(self.infcx));
         match orig {
           ty::re_var(rid) => self.resolve_region_var(rid),
@@ -153,14 +153,14 @@ impl resolve_state {
         }
     }
 
-    fn resolve_region_var(rid: RegionVid) -> ty::region {
+    fn resolve_region_var(rid: RegionVid) -> ty::Region {
         if !self.should(resolve_rvar) {
             return ty::re_var(rid)
         }
         self.infcx.region_vars.resolve_var(rid)
     }
 
-    fn assert_not_rvar(rid: RegionVid, r: ty::region) {
+    fn assert_not_rvar(rid: RegionVid, r: ty::Region) {
         match r {
           ty::re_var(rid2) => {
             self.err = Some(region_var_bound_by_region_var(rid, rid2));
diff --git a/src/rustc/middle/typeck/infer/sub.rs b/src/rustc/middle/typeck/infer/sub.rs
index e6bcdf3e71f..0aba993512b 100644
--- a/src/rustc/middle/typeck/infer/sub.rs
+++ b/src/rustc/middle/typeck/infer/sub.rs
@@ -1,6 +1,6 @@
 use combine::*;
 use unify::*;
-use to_str::to_str;
+use to_str::ToStr;
 
 enum Sub = combine_fields;  // "subtype", "subregion" etc
 
@@ -20,14 +20,14 @@ impl Sub: combine {
         Sub(opp).tys(b, a)
     }
 
-    fn contraregions(a: ty::region, b: ty::region) -> cres<ty::region> {
+    fn contraregions(a: ty::Region, b: ty::Region) -> cres<ty::Region> {
         let opp = combine_fields {
             a_is_expected: !self.a_is_expected,.. *self
         };
         Sub(opp).regions(b, a)
     }
 
-    fn regions(a: ty::region, b: ty::region) -> cres<ty::region> {
+    fn regions(a: ty::Region, b: ty::Region) -> cres<ty::Region> {
         debug!("%s.regions(%s, %s)",
                self.tag(),
                a.to_str(self.infcx),
diff --git a/src/rustc/middle/typeck/infer/to_str.rs b/src/rustc/middle/typeck/infer/to_str.rs
index 7acfdcac424..c98a217a746 100644
--- a/src/rustc/middle/typeck/infer/to_str.rs
+++ b/src/rustc/middle/typeck/infer/to_str.rs
@@ -1,29 +1,29 @@
 use integral::{int_ty_set};
 use unify::{var_value, redirect, root};
 
-trait to_str {
+trait ToStr {
     fn to_str(cx: infer_ctxt) -> ~str;
 }
 
-impl ty::t: to_str {
+impl ty::t: ToStr {
     fn to_str(cx: infer_ctxt) -> ~str {
         ty_to_str(cx.tcx, self)
     }
 }
 
-impl ty::mt: to_str {
+impl ty::mt: ToStr {
     fn to_str(cx: infer_ctxt) -> ~str {
         mt_to_str(cx.tcx, self)
     }
 }
 
-impl ty::region: to_str {
+impl ty::Region: ToStr {
     fn to_str(cx: infer_ctxt) -> ~str {
         util::ppaux::region_to_str(cx.tcx, self)
     }
 }
 
-impl<V:Copy to_str> bound<V>: to_str {
+impl<V:Copy ToStr> bound<V>: ToStr {
     fn to_str(cx: infer_ctxt) -> ~str {
         match self {
           Some(v) => v.to_str(cx),
@@ -32,7 +32,7 @@ impl<V:Copy to_str> bound<V>: to_str {
     }
 }
 
-impl<T:Copy to_str> bounds<T>: to_str {
+impl<T:Copy ToStr> bounds<T>: ToStr {
     fn to_str(cx: infer_ctxt) -> ~str {
         fmt!("{%s <: %s}",
              self.lb.to_str(cx),
@@ -40,7 +40,7 @@ impl<T:Copy to_str> bounds<T>: to_str {
     }
 }
 
-impl int_ty_set: to_str {
+impl int_ty_set: ToStr {
     fn to_str(_cx: infer_ctxt) -> ~str {
         match self {
           int_ty_set(v) => uint::to_str(v, 10u)
@@ -48,7 +48,7 @@ impl int_ty_set: to_str {
     }
 }
 
-impl<V:Copy vid, T:Copy to_str> var_value<V, T>: to_str {
+impl<V:Copy vid, T:Copy ToStr> var_value<V, T>: ToStr {
     fn to_str(cx: infer_ctxt) -> ~str {
         match self {
           redirect(vid) => fmt!("redirect(%s)", vid.to_str()),
diff --git a/src/rustc/middle/typeck/infer/unify.rs b/src/rustc/middle/typeck/infer/unify.rs
index 7ccbaa40ada..f865705563c 100644
--- a/src/rustc/middle/typeck/infer/unify.rs
+++ b/src/rustc/middle/typeck/infer/unify.rs
@@ -1,6 +1,6 @@
 use combine::combine;
 use integral::*;
-use to_str::to_str;
+use to_str::ToStr;
 use std::smallintmap::SmallIntMap;
 
 enum var_value<V:Copy, T:Copy> {
@@ -46,7 +46,7 @@ impl infer_ctxt {
         }
     }
 
-    fn set<V:Copy vid, T:Copy to_str>(
+    fn set<V:Copy vid, T:Copy ToStr>(
         vb: &vals_and_bindings<V, T>, vid: V,
         +new_v: var_value<V, T>) {
 
diff --git a/src/rustc/middle/typeck/rscope.rs b/src/rustc/middle/typeck/rscope.rs
index 9b9695088f3..d379607d6a8 100644
--- a/src/rustc/middle/typeck/rscope.rs
+++ b/src/rustc/middle/typeck/rscope.rs
@@ -2,21 +2,21 @@ use result::Result;
 use syntax::parse::token::special_idents;
 
 trait region_scope {
-    fn anon_region(span: span) -> Result<ty::region, ~str>;
-    fn self_region(span: span) -> Result<ty::region, ~str>;
-    fn named_region(span: span, id: ast::ident) -> Result<ty::region, ~str>;
+    fn anon_region(span: span) -> Result<ty::Region, ~str>;
+    fn self_region(span: span) -> Result<ty::Region, ~str>;
+    fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str>;
 }
 
 enum empty_rscope { empty_rscope }
 impl empty_rscope: region_scope {
-    fn anon_region(_span: span) -> Result<ty::region, ~str> {
+    fn anon_region(_span: span) -> Result<ty::Region, ~str> {
         result::Ok(ty::re_static)
     }
-    fn self_region(_span: span) -> Result<ty::region, ~str> {
+    fn self_region(_span: span) -> Result<ty::Region, ~str> {
         result::Err(~"only the static region is allowed here")
     }
     fn named_region(_span: span, _id: ast::ident)
-        -> Result<ty::region, ~str>
+        -> Result<ty::Region, ~str>
     {
         result::Err(~"only the static region is allowed here")
     }
@@ -24,17 +24,17 @@ impl empty_rscope: region_scope {
 
 enum type_rscope = Option<ty::region_variance>;
 impl type_rscope: region_scope {
-    fn anon_region(_span: span) -> Result<ty::region, ~str> {
+    fn anon_region(_span: span) -> Result<ty::Region, ~str> {
         match *self {
           Some(_) => result::Ok(ty::re_bound(ty::br_self)),
           None => result::Err(~"to use region types here, the containing \
                                 type must be declared with a region bound")
         }
     }
-    fn self_region(span: span) -> Result<ty::region, ~str> {
+    fn self_region(span: span) -> Result<ty::Region, ~str> {
         self.anon_region(span)
     }
-    fn named_region(span: span, id: ast::ident) -> Result<ty::region, ~str> {
+    fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str> {
         do empty_rscope.named_region(span, id).chain_err |_e| {
             result::Err(~"named regions other than `self` are not \
                           allowed as part of a type declaration")
@@ -42,26 +42,26 @@ impl type_rscope: region_scope {
     }
 }
 
-fn bound_self_region(rp: Option<ty::region_variance>) -> Option<ty::region> {
+fn bound_self_region(rp: Option<ty::region_variance>) -> Option<ty::Region> {
     match rp {
       Some(_) => Some(ty::re_bound(ty::br_self)),
       None => None
     }
 }
 
-enum anon_rscope = {anon: ty::region, base: region_scope};
-fn in_anon_rscope<RS: region_scope Copy Owned>(self: RS, r: ty::region)
+enum anon_rscope = {anon: ty::Region, base: region_scope};
+fn in_anon_rscope<RS: region_scope Copy Owned>(self: RS, r: ty::Region)
     -> @anon_rscope {
     @anon_rscope({anon: r, base: self as region_scope})
 }
 impl @anon_rscope: region_scope {
-    fn anon_region(_span: span) -> Result<ty::region, ~str> {
+    fn anon_region(_span: span) -> Result<ty::Region, ~str> {
         result::Ok(self.anon)
     }
-    fn self_region(span: span) -> Result<ty::region, ~str> {
+    fn self_region(span: span) -> Result<ty::Region, ~str> {
         self.base.self_region(span)
     }
-    fn named_region(span: span, id: ast::ident) -> Result<ty::region, ~str> {
+    fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str> {
         self.base.named_region(span, id)
     }
 }
@@ -76,15 +76,15 @@ fn in_binding_rscope<RS: region_scope Copy Owned>(self: RS)
     @binding_rscope { base: base, anon_bindings: 0 }
 }
 impl @binding_rscope: region_scope {
-    fn anon_region(_span: span) -> Result<ty::region, ~str> {
+    fn anon_region(_span: span) -> Result<ty::Region, ~str> {
         let idx = self.anon_bindings;
         self.anon_bindings += 1;
         result::Ok(ty::re_bound(ty::br_anon(idx)))
     }
-    fn self_region(span: span) -> Result<ty::region, ~str> {
+    fn self_region(span: span) -> Result<ty::Region, ~str> {
         self.base.self_region(span)
     }
-    fn named_region(span: span, id: ast::ident) -> Result<ty::region, ~str> {
+    fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str> {
         do self.base.named_region(span, id).chain_err |_e| {
             result::Ok(ty::re_bound(ty::br_named(id)))
         }
diff --git a/src/rustc/util/common.rs b/src/rustc/util/common.rs
index e314a12a676..0c6ec267da8 100644
--- a/src/rustc/util/common.rs
+++ b/src/rustc/util/common.rs
@@ -1,6 +1,5 @@
 use std::map::HashMap;
 use syntax::ast;
-use ast::{ty, pat};
 use syntax::codemap::{span};
 use syntax::visit;
 use syntax::print;
diff --git a/src/rustc/util/ppaux.rs b/src/rustc/util/ppaux.rs
index 3f8ca0f6e6a..27ace283fa0 100644
--- a/src/rustc/util/ppaux.rs
+++ b/src/rustc/util/ppaux.rs
@@ -6,7 +6,7 @@ use middle::ty::{bound_copy, bound_const, bound_owned, bound_send,
 use middle::ty::{bound_region, br_anon, br_named, br_self, br_cap_avoid};
 use middle::ty::{ck_block, ck_box, ck_uniq, ctxt, field, method};
 use middle::ty::{mt, t, param_bound};
-use middle::ty::{re_bound, re_free, re_scope, re_var, re_static, region};
+use middle::ty::{re_bound, re_free, re_scope, re_var, re_static, Region};
 use middle::ty::{ty_bool, ty_bot, ty_box, ty_class, ty_enum};
 use middle::ty::{ty_estr, ty_evec, ty_float, ty_fn, ty_trait, ty_int};
 use middle::ty::{ty_nil, ty_opaque_box, ty_opaque_closure_ptr, ty_param};
@@ -21,11 +21,10 @@ use syntax::print::pprust::{path_to_str, proto_to_str,
                             mode_to_str, purity_to_str};
 use syntax::{ast, ast_util};
 use syntax::ast_map;
-use driver::session::session;
 
 fn note_and_explain_region(cx: ctxt,
                            prefix: ~str,
-                           region: ty::region,
+                           region: ty::Region,
                            suffix: ~str) {
     match explain_region_and_span(cx, region) {
       (str, Some(span)) => {
@@ -42,13 +41,13 @@ fn note_and_explain_region(cx: ctxt,
 
 /// Returns a string like "the block at 27:31" that attempts to explain a
 /// lifetime in a way it might plausibly be understood.
-fn explain_region(cx: ctxt, region: ty::region) -> ~str {
+fn explain_region(cx: ctxt, region: ty::Region) -> ~str {
   let (res, _) = explain_region_and_span(cx, region);
   return res;
 }
 
 
-fn explain_region_and_span(cx: ctxt, region: ty::region)
+fn explain_region_and_span(cx: ctxt, region: ty::Region)
     -> (~str, Option<span>)
 {
     return match region {
@@ -172,7 +171,7 @@ fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str {
 // In general, if you are giving a region error message,
 // you should use `explain_region()` or, better yet,
 // `note_and_explain_region()`
-fn region_to_str(cx: ctxt, region: region) -> ~str {
+fn region_to_str(cx: ctxt, region: Region) -> ~str {
     if cx.sess.verbose() {
         return fmt!("&%?", region);
     }
@@ -381,7 +380,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str {
 
 fn parameterized(cx: ctxt,
                  base: ~str,
-                 self_r: Option<ty::region>,
+                 self_r: Option<ty::Region>,
                  tps: ~[ty::t]) -> ~str {
 
     let r_str = match self_r {
diff --git a/src/rustdoc/astsrv.rs b/src/rustdoc/astsrv.rs
index 27d4d51a010..7b2c6fe5f0c 100644
--- a/src/rustdoc/astsrv.rs
+++ b/src/rustdoc/astsrv.rs
@@ -10,7 +10,7 @@ non-sendableness.
 use std::map::HashMap;
 use rustc::driver::session;
 use session::{basic_options, options};
-use session::session;
+use session::Session;
 use rustc::driver::driver;
 use syntax::diagnostic;
 use syntax::diagnostic::handler;
@@ -35,7 +35,7 @@ type Ctxt = {
 
 type SrvOwner<T> = fn(srv: Srv) -> T;
 type CtxtHandler<T> = fn~(ctxt: Ctxt) -> T;
-type Parser = fn~(session, ~str) -> @ast::crate;
+type Parser = fn~(Session, ~str) -> @ast::crate;
 
 enum Msg {
     HandleRequest(fn~(Ctxt)),
@@ -101,7 +101,7 @@ fn exec<T:Send>(
     comm::recv(po)
 }
 
-fn build_ctxt(sess: session,
+fn build_ctxt(sess: Session,
               ast: @ast::crate) -> Ctxt {
 
     use rustc::front::config;
@@ -118,7 +118,7 @@ fn build_ctxt(sess: session,
     }
 }
 
-fn build_session() -> session {
+fn build_session() -> Session {
     let sopts: @options = basic_options();
     let codemap = codemap::new_codemap();
     let error_handlers = build_error_handlers(codemap);
@@ -137,7 +137,7 @@ type ErrorHandlers = {
 // Build a custom error handler that will allow us to ignore non-fatal
 // errors
 fn build_error_handlers(
-    codemap: codemap::codemap
+    codemap: codemap::CodeMap
 ) -> ErrorHandlers {
 
     type DiagnosticHandler = {
@@ -156,13 +156,13 @@ fn build_error_handlers(
         fn note(msg: &str) { self.inner.note(msg) }
         fn bug(msg: &str) -> ! { self.inner.bug(msg) }
         fn unimpl(msg: &str) -> ! { self.inner.unimpl(msg) }
-        fn emit(cmsp: Option<(codemap::codemap, codemap::span)>,
+        fn emit(cmsp: Option<(codemap::CodeMap, codemap::span)>,
                 msg: &str, lvl: diagnostic::level) {
             self.inner.emit(cmsp, msg, lvl)
         }
     }
 
-    let emitter = fn@(cmsp: Option<(codemap::codemap, codemap::span)>,
+    let emitter = fn@(cmsp: Option<(codemap::CodeMap, codemap::span)>,
                        msg: &str, lvl: diagnostic::level) {
         diagnostic::emit(cmsp, msg, lvl);
     };
diff --git a/src/rustdoc/parse.rs b/src/rustdoc/parse.rs
index 59d64f18d59..7fc17dfe838 100644
--- a/src/rustdoc/parse.rs
+++ b/src/rustdoc/parse.rs
@@ -20,16 +20,16 @@ fn from_str(source: ~str) -> @ast::crate {
         ~"-", @source, ~[], parse::new_parse_sess(None))
 }
 
-fn from_file_sess(sess: session::session, file: &Path) -> @ast::crate {
+fn from_file_sess(sess: session::Session, file: &Path) -> @ast::crate {
     parse::parse_crate_from_file(
         file, cfg(sess, file_input(*file)), sess.parse_sess)
 }
 
-fn from_str_sess(sess: session::session, source: ~str) -> @ast::crate {
+fn from_str_sess(sess: session::Session, source: ~str) -> @ast::crate {
     parse::parse_crate_from_source_str(
         ~"-", @source, cfg(sess, str_input(source)), sess.parse_sess)
 }
 
-fn cfg(sess: session::session, input: driver::input) -> ast::crate_cfg {
+fn cfg(sess: session::Session, input: driver::input) -> ast::crate_cfg {
     driver::default_configuration(sess, ~"rustdoc", input)
 }
diff --git a/src/test/run-pass/issue-2930.rs b/src/test/run-pass/issue-2930.rs
index c480d382adc..bccaeeaf18d 100644
--- a/src/test/run-pass/issue-2930.rs
+++ b/src/test/run-pass/issue-2930.rs
@@ -1,6 +1,6 @@
 proto! stream (
-    stream:send<T:Send> {
-        send(T) -> stream<T>
+    Stream:send<T:Send> {
+        send(T) -> Stream<T>
     }
 )
 
diff --git a/src/test/run-pass/pipe-select.rs b/src/test/run-pass/pipe-select.rs
index 627cdbee9ca..23588de2eca 100644
--- a/src/test/run-pass/pipe-select.rs
+++ b/src/test/run-pass/pipe-select.rs
@@ -14,8 +14,8 @@ proto! oneshot (
 )
 
 proto! stream (
-    stream:send<T:Send> {
-        send(T) -> stream<T>
+    Stream:send<T:Send> {
+        send(T) -> Stream<T>
     }
 )