author     Alex Crichton <alex@alexcrichton.com>  2015-01-21 09:13:51 -0800
committer  Alex Crichton <alex@alexcrichton.com>  2015-01-21 09:13:51 -0800
commit     0c981875e46763a9b3cd53443bf73dfd3e291d18 (patch)
tree       b0d1f49551beab62865f5945d588a8a65931c9f5 /src
parent     5da25386b3e70a5a538f75fbd5b42a8db04dd93d (diff)
parent     3c32cd1be27f321658382e39d34f5d993d99ae8b (diff)
rollup merge of #21340: pshc/libsyntax-no-more-ints
Collaboration with @rylev!

I didn't change `int` in the [quasi-quoter](https://github.com/pshc/rust/blob/99ae1a30f3ca28c0f7e431620560d30e44627124/src/libsyntax/ext/quote.rs#L328), because I'm not sure whether doing so would have adverse effects.

Addresses #21095.
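
For context, the bulk of this patch is mechanical: `uint`/`int` become `usize`/`isize`, and helpers such as `Pos::from_uint`/`Pos::to_uint` and `Name::uint()` are renamed to their `usize` spellings. The following is a minimal standalone sketch of the renamed `Pos` conversions, trimmed from the `codemap.rs` hunks below; the real trait carries more impls and 2015-era derives (`Show`, etc.), so this is illustrative only:

```rust
/// Simplified sketch of the post-patch `Pos` conversions
/// (trimmed from src/libsyntax/codemap.rs; not the full trait).
pub trait Pos {
    fn from_usize(n: usize) -> Self;
    fn to_usize(&self) -> usize;
}

/// A byte offset, kept at 32 bits because the AST stores many of them.
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub struct BytePos(pub u32);

impl Pos for BytePos {
    fn from_usize(n: usize) -> BytePos { BytePos(n as u32) }
    fn to_usize(&self) -> usize { self.0 as usize }
}

fn main() {
    // Span arithmetic goes through the usize conversions, as in the diff.
    let lo = BytePos::from_usize(10);
    let hi = BytePos::from_usize(14);
    assert_eq!(hi.to_usize() - lo.to_usize(), 4);
}
```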
Diffstat (limited to 'src')
-rw-r--r--  src/librustc/lint/builtin.rs | 2
-rw-r--r--  src/librustc/util/ppaux.rs | 2
-rw-r--r--  src/librustc_resolve/lib.rs | 2
-rw-r--r--  src/librustc_trans/save/span_utils.rs | 4
-rw-r--r--  src/librustc_trans/trans/common.rs | 6
-rw-r--r--  src/librustc_trans/trans/debuginfo.rs | 8
-rw-r--r--  src/librustc_trans/trans/meth.rs | 2
-rw-r--r--  src/librustdoc/clean/mod.rs | 4
-rw-r--r--  src/libsyntax/abi.rs | 4
-rw-r--r--  src/libsyntax/ast.rs | 30
-rw-r--r--  src/libsyntax/ast_map/mod.rs | 12
-rw-r--r--  src/libsyntax/ast_util.rs | 24
-rw-r--r--  src/libsyntax/attr.rs | 2
-rw-r--r--  src/libsyntax/codemap.rs | 82
-rw-r--r--  src/libsyntax/diagnostic.rs | 40
-rw-r--r--  src/libsyntax/ext/base.rs | 2
-rw-r--r--  src/libsyntax/ext/build.rs | 18
-rw-r--r--  src/libsyntax/ext/deriving/decodable.rs | 12
-rw-r--r--  src/libsyntax/ext/deriving/default.rs | 2
-rw-r--r--  src/libsyntax/ext/deriving/encodable.rs | 14
-rw-r--r--  src/libsyntax/ext/deriving/generic/mod.rs | 48
-rw-r--r--  src/libsyntax/ext/deriving/generic/ty.rs | 4
-rw-r--r--  src/libsyntax/ext/deriving/hash.rs | 2
-rw-r--r--  src/libsyntax/ext/deriving/rand.rs | 6
-rw-r--r--  src/libsyntax/ext/env.rs | 2
-rw-r--r--  src/libsyntax/ext/expand.rs | 56
-rw-r--r--  src/libsyntax/ext/format.rs | 20
-rw-r--r--  src/libsyntax/ext/mtwt.rs | 12
-rw-r--r--  src/libsyntax/ext/quote.rs | 4
-rw-r--r--  src/libsyntax/ext/source_util.rs | 4
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs | 52
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs | 10
-rw-r--r--  src/libsyntax/fold.rs | 8
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs | 24
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs | 60
-rw-r--r--  src/libsyntax/parse/mod.rs | 52
-rw-r--r--  src/libsyntax/parse/obsolete.rs | 2
-rw-r--r--  src/libsyntax/parse/parser.rs | 56
-rw-r--r--  src/libsyntax/parse/token.rs | 8
-rw-r--r--  src/libsyntax/print/pp.rs | 126
-rw-r--r--  src/libsyntax/print/pprust.rs | 90
-rw-r--r--  src/libsyntax/test.rs | 4
-rw-r--r--  src/libsyntax/util/interner.rs | 12
-rw-r--r--  src/libsyntax/util/parser_testing.rs | 4
-rw-r--r--  src/libsyntax/util/small_vector.rs | 34
-rw-r--r--  src/test/auxiliary/roman_numerals.rs | 4
46 files changed, 488 insertions(+), 488 deletions(-)
diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs
index 8a27cfc510f..59b6520216d 100644
--- a/src/librustc/lint/builtin.rs
+++ b/src/librustc/lint/builtin.rs
@@ -1329,7 +1329,7 @@ impl UnusedMut {
                 let ident = path1.node;
                 if let ast::BindByValue(ast::MutMutable) = mode {
                     if !token::get_ident(ident).get().starts_with("_") {
-                        match mutables.entry(ident.name.uint()) {
+                        match mutables.entry(ident.name.usize()) {
                             Vacant(entry) => { entry.insert(vec![id]); },
                             Occupied(mut entry) => { entry.get_mut().push(id); },
                         }
diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs
index fb44d0cadfa..8427c471eee 100644
--- a/src/librustc/util/ppaux.rs
+++ b/src/librustc/util/ppaux.rs
@@ -164,7 +164,7 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region)
     fn explain_span(cx: &ctxt, heading: &str, span: Span)
                     -> (String, Option<Span>) {
         let lo = cx.sess.codemap().lookup_char_pos_adj(span.lo);
-        (format!("the {} at {}:{}", heading, lo.line, lo.col.to_uint()),
+        (format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize()),
          Some(span))
     }
 }
diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs
index 6b45c2443d3..e6173e3ab8e 100644
--- a/src/librustc_resolve/lib.rs
+++ b/src/librustc_resolve/lib.rs
@@ -1967,7 +1967,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
                     let module_name = self.module_to_string(&*search_module);
                     let mut span = span;
                     let msg = if "???" == &module_name[] {
-                        span.hi = span.lo + Pos::from_uint(segment_name.get().len());
+                        span.hi = span.lo + Pos::from_usize(segment_name.get().len());
 
                         match search_parent_externals(name,
                                                      &self.current_module) {
diff --git a/src/librustc_trans/save/span_utils.rs b/src/librustc_trans/save/span_utils.rs
index 77343612ac8..97b3cda006b 100644
--- a/src/librustc_trans/save/span_utils.rs
+++ b/src/librustc_trans/save/span_utils.rs
@@ -40,8 +40,8 @@ impl<'a> SpanUtils<'a> {
         format!("file_name,{},file_line,{},file_col,{},extent_start,{},extent_start_bytes,{},\
                  file_line_end,{},file_col_end,{},extent_end,{},extent_end_bytes,{}",
                 lo_loc.file.name,
-                lo_loc.line, lo_loc.col.to_uint(), lo_pos.to_uint(), lo_pos_byte.to_uint(),
-                hi_loc.line, hi_loc.col.to_uint(), hi_pos.to_uint(), hi_pos_byte.to_uint())
+                lo_loc.line, lo_loc.col.to_usize(), lo_pos.to_usize(), lo_pos_byte.to_usize(),
+                hi_loc.line, hi_loc.col.to_usize(), hi_pos.to_usize(), hi_pos_byte.to_usize())
     }
 
     // sub_span starts at span.lo, so we need to adjust the positions etc.
diff --git a/src/librustc_trans/trans/common.rs b/src/librustc_trans/trans/common.rs
index 312fd33ef1f..8a7630e6301 100644
--- a/src/librustc_trans/trans/common.rs
+++ b/src/librustc_trans/trans/common.rs
@@ -275,7 +275,7 @@ pub fn return_type_is_void(ccx: &CrateContext, ty: Ty) -> bool {
 /// Generates a unique symbol based off the name given. This is used to create
 /// unique symbols for things like closures.
 pub fn gensym_name(name: &str) -> PathElem {
-    let num = token::gensym(name).uint();
+    let num = token::gensym(name).usize();
     // use one colon which will get translated to a period by the mangler, and
     // we're guaranteed that `num` is globally unique for this crate.
     PathName(token::gensym(&format!("{}:{}", name, num)[]))
@@ -848,7 +848,7 @@ pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> Va
                                                 !null_terminated as Bool);
 
         let gsym = token::gensym("str");
-        let buf = CString::from_vec(format!("str{}", gsym.uint()).into_bytes());
+        let buf = CString::from_vec(format!("str{}", gsym.usize()).into_bytes());
         let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(sc).to_ref(), buf.as_ptr());
         llvm::LLVMSetInitializer(g, sc);
         llvm::LLVMSetGlobalConstant(g, True);
@@ -873,7 +873,7 @@ pub fn C_binary_slice(cx: &CrateContext, data: &[u8]) -> ValueRef {
         let lldata = C_bytes(cx, data);
 
         let gsym = token::gensym("binary");
-        let name = format!("binary{}", gsym.uint());
+        let name = format!("binary{}", gsym.usize());
         let name = CString::from_vec(name.into_bytes());
         let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(lldata).to_ref(),
                                     name.as_ptr());
diff --git a/src/librustc_trans/trans/debuginfo.rs b/src/librustc_trans/trans/debuginfo.rs
index 2f01f0328e2..e3194817dd3 100644
--- a/src/librustc_trans/trans/debuginfo.rs
+++ b/src/librustc_trans/trans/debuginfo.rs
@@ -1204,7 +1204,7 @@ pub fn set_source_location(fcx: &FunctionContext,
 
                 set_debug_location(cx, DebugLocation::new(scope,
                                                           loc.line,
-                                                          loc.col.to_uint()));
+                                                          loc.col.to_usize()));
             } else {
                 set_debug_location(cx, UnknownLocation);
             }
@@ -1716,7 +1716,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 
     set_debug_location(cx, DebugLocation::new(scope_metadata,
                                               loc.line,
-                                              loc.col.to_uint()));
+                                              loc.col.to_usize()));
     unsafe {
         let instr = llvm::LLVMDIBuilderInsertDeclareAtEnd(
             DIB(cx),
@@ -3279,7 +3279,7 @@ fn create_scope_map(cx: &CrateContext,
                 parent_scope,
                 file_metadata,
                 loc.line as c_uint,
-                loc.col.to_uint() as c_uint)
+                loc.col.to_usize() as c_uint)
         };
 
         scope_stack.push(ScopeStackEntry { scope_metadata: scope_metadata,
@@ -3401,7 +3401,7 @@ fn create_scope_map(cx: &CrateContext,
                                 parent_scope,
                                 file_metadata,
                                 loc.line as c_uint,
-                                loc.col.to_uint() as c_uint)
+                                loc.col.to_usize() as c_uint)
                         };
 
                         scope_stack.push(ScopeStackEntry {
diff --git a/src/librustc_trans/trans/meth.rs b/src/librustc_trans/trans/meth.rs
index 0fb0dffe930..712f5fa53c9 100644
--- a/src/librustc_trans/trans/meth.rs
+++ b/src/librustc_trans/trans/meth.rs
@@ -785,7 +785,7 @@ pub fn make_vtable<I: Iterator<Item=ValueRef>>(ccx: &CrateContext,
     unsafe {
         let tbl = C_struct(ccx, &components[], false);
         let sym = token::gensym("vtable");
-        let buf = CString::from_vec(format!("vtable{}", sym.uint()).into_bytes());
+        let buf = CString::from_vec(format!("vtable{}", sym.usize()).into_bytes());
         let vt_gvar = llvm::LLVMAddGlobal(ccx.llmod(), val_ty(tbl).to_ref(),
                                           buf.as_ptr());
         llvm::LLVMSetInitializer(vt_gvar, tbl);
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 7342c9f3e81..adc3a2401a5 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -1872,9 +1872,9 @@ impl Clean<Span> for syntax::codemap::Span {
         Span {
             filename: filename.to_string(),
             loline: lo.line,
-            locol: lo.col.to_uint(),
+            locol: lo.col.to_usize(),
             hiline: hi.line,
-            hicol: hi.col.to_uint(),
+            hicol: hi.col.to_usize(),
         }
     }
 }
diff --git a/src/libsyntax/abi.rs b/src/libsyntax/abi.rs
index 09235ee209c..72b9d4d1e63 100644
--- a/src/libsyntax/abi.rs
+++ b/src/libsyntax/abi.rs
@@ -105,8 +105,8 @@ pub fn all_names() -> Vec<&'static str> {
 
 impl Abi {
     #[inline]
-    pub fn index(&self) -> uint {
-        *self as uint
+    pub fn index(&self) -> usize {
+        *self as usize
     }
 
     #[inline]
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index fcf80410da2..b57c08a7727 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -95,7 +95,7 @@ impl Ident {
 
     pub fn encode_with_hygiene(&self) -> String {
         format!("\x00name_{},ctxt_{}\x00",
-                self.name.uint(),
+                self.name.usize(),
                 self.ctxt)
     }
 }
@@ -152,7 +152,7 @@ impl PartialEq for Ident {
 
 /// A SyntaxContext represents a chain of macro-expandings
 /// and renamings. Each macro expansion corresponds to
-/// a fresh uint
+/// a fresh usize
 
 // I'm representing this syntax context as an index into
 // a table, in order to work around a compiler bug
@@ -181,9 +181,9 @@ impl Name {
         }
     }
 
-    pub fn uint(&self) -> uint {
+    pub fn usize(&self) -> usize {
         let Name(nm) = *self;
-        nm as uint
+        nm as usize
     }
 
     pub fn ident(&self) -> Ident {
@@ -740,7 +740,7 @@ pub enum Expr_ {
     ExprAssign(P<Expr>, P<Expr>),
     ExprAssignOp(BinOp, P<Expr>, P<Expr>),
     ExprField(P<Expr>, SpannedIdent),
-    ExprTupField(P<Expr>, Spanned<uint>),
+    ExprTupField(P<Expr>, Spanned<usize>),
     ExprIndex(P<Expr>, P<Expr>),
     ExprRange(Option<P<Expr>>, Option<P<Expr>>),
 
@@ -839,7 +839,7 @@ pub struct SequenceRepetition {
     /// Whether the sequence can be repeated zero (*), or one or more times (+)
     pub op: KleeneOp,
     /// The number of `MatchNt`s that appear in the sequence (and subsequences)
-    pub num_captures: uint,
+    pub num_captures: usize,
 }
 
 /// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
@@ -878,7 +878,7 @@ pub enum TokenTree {
 }
 
 impl TokenTree {
-    pub fn len(&self) -> uint {
+    pub fn len(&self) -> usize {
         match *self {
             TtToken(_, token::DocComment(_)) => 2,
             TtToken(_, token::SpecialVarNt(..)) => 2,
@@ -893,7 +893,7 @@ impl TokenTree {
         }
     }
 
-    pub fn get_tt(&self, index: uint) -> TokenTree {
+    pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
             (&TtToken(sp, token::DocComment(_)), 0) => {
                 TtToken(sp, token::Pound)
@@ -963,7 +963,7 @@ pub enum Mac_ {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)]
 pub enum StrStyle {
     CookedStr,
-    RawStr(uint)
+    RawStr(usize)
 }
 
 pub type Lit = Spanned<Lit_>;
@@ -992,7 +992,7 @@ pub enum LitIntType {
 }
 
 impl LitIntType {
-    pub fn suffix_len(&self) -> uint {
+    pub fn suffix_len(&self) -> usize {
         match *self {
             UnsuffixedIntLit(_) => 0,
             SignedIntLit(s, _) => s.suffix_len(),
@@ -1113,7 +1113,7 @@ impl fmt::String for IntTy {
 }
 
 impl IntTy {
-    pub fn suffix_len(&self) -> uint {
+    pub fn suffix_len(&self) -> usize {
         match *self {
             TyIs(true) /* i */ => 1,
             TyIs(false) /* is */ | TyI8 => 2,
@@ -1146,7 +1146,7 @@ impl PartialEq for UintTy {
 }
 
 impl UintTy {
-    pub fn suffix_len(&self) -> uint {
+    pub fn suffix_len(&self) -> usize {
         match *self {
             TyUs(true) /* u */ => 1,
             TyUs(false) /* us */ | TyU8 => 2,
@@ -1186,7 +1186,7 @@ impl fmt::String for FloatTy {
 }
 
 impl FloatTy {
-    pub fn suffix_len(&self) -> uint {
+    pub fn suffix_len(&self) -> usize {
         match *self {
             TyF32 | TyF64 => 3, // add F128 handling here
         }
@@ -1274,7 +1274,7 @@ pub enum Ty_ {
     TyPtr(MutTy),
     /// A reference (`&'a T` or `&'a mut T`)
     TyRptr(Option<Lifetime>, MutTy),
-    /// A bare function (e.g. `fn(uint) -> bool`)
+    /// A bare function (e.g. `fn(usize) -> bool`)
     TyBareFn(P<BareFnTy>),
     /// A tuple (`(A, B, C, D,...)`)
     TyTup(Vec<P<Ty>> ),
@@ -1571,7 +1571,7 @@ pub enum AttrStyle {
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)]
-pub struct AttrId(pub uint);
+pub struct AttrId(pub usize);
 
 /// Doc-comments are promoted to attributes that have is_sugared_doc = true
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)]
diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs
index f462a730d3a..d57a1356def 100644
--- a/src/libsyntax/ast_map/mod.rs
+++ b/src/libsyntax/ast_map/mod.rs
@@ -264,12 +264,12 @@ pub struct Map<'ast> {
 }
 
 impl<'ast> Map<'ast> {
-    fn entry_count(&self) -> uint {
+    fn entry_count(&self) -> usize {
         self.map.borrow().len()
     }
 
     fn find_entry(&self, id: NodeId) -> Option<MapEntry<'ast>> {
-        self.map.borrow().get(id as uint).map(|e| *e)
+        self.map.borrow().get(id as usize).map(|e| *e)
     }
 
     pub fn krate(&self) -> &'ast Crate {
@@ -652,7 +652,7 @@ impl<'a, 'ast> Iterator for NodesMatchingSuffix<'a, 'ast> {
     fn next(&mut self) -> Option<NodeId> {
         loop {
             let idx = self.idx;
-            if idx as uint >= self.map.entry_count() {
+            if idx as usize >= self.map.entry_count() {
                 return None;
             }
             self.idx += 1;
@@ -744,10 +744,10 @@ impl<'ast> NodeCollector<'ast> {
     fn insert_entry(&mut self, id: NodeId, entry: MapEntry<'ast>) {
         debug!("ast_map: {:?} => {:?}", id, entry);
         let len = self.map.len();
-        if id as uint >= len {
-            self.map.extend(repeat(NotPresent).take(id as uint - len + 1));
+        if id as usize >= len {
+            self.map.extend(repeat(NotPresent).take(id as usize - len + 1));
         }
-        self.map[id as uint] = entry;
+        self.map[id as usize] = entry;
     }
 
     fn insert(&mut self, id: NodeId, node: Node<'ast>) {
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index bc7fbd46fd8..34fd498322c 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -156,7 +156,7 @@ pub fn int_ty_max(t: IntTy) -> u64 {
 }
 
 /// Get a string representation of an unsigned int type, with its value.
-/// We want to avoid "42uint" in favor of "42u"
+/// We want to avoid "42u" in favor of "42us". "42uint" is right out.
 pub fn uint_ty_to_string(t: UintTy, val: Option<u64>) -> String {
     let s = match t {
         TyUs(true) if val.is_some() => "u",
@@ -319,25 +319,25 @@ pub fn struct_field_visibility(field: ast::StructField) -> Visibility {
 }
 
 /// Maps a binary operator to its precedence
-pub fn operator_prec(op: ast::BinOp) -> uint {
+pub fn operator_prec(op: ast::BinOp) -> usize {
   match op {
       // 'as' sits here with 12
-      BiMul | BiDiv | BiRem     => 11u,
-      BiAdd | BiSub             => 10u,
-      BiShl | BiShr             =>  9u,
-      BiBitAnd                  =>  8u,
-      BiBitXor                  =>  7u,
-      BiBitOr                   =>  6u,
-      BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3u,
-      BiAnd                     =>  2u,
-      BiOr                      =>  1u
+      BiMul | BiDiv | BiRem     => 11us,
+      BiAdd | BiSub             => 10us,
+      BiShl | BiShr             =>  9us,
+      BiBitAnd                  =>  8us,
+      BiBitXor                  =>  7us,
+      BiBitOr                   =>  6us,
+      BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3us,
+      BiAnd                     =>  2us,
+      BiOr                      =>  1us
   }
 }
 
 /// Precedence of the `as` operator, which is a binary operator
 /// not appearing in the prior table.
 #[allow(non_upper_case_globals)]
-pub static as_prec: uint = 12u;
+pub static as_prec: usize = 12us;
 
 pub fn empty_generics() -> Generics {
     Generics {
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 6f57c06d33e..8f58b7694b6 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -170,7 +170,7 @@ pub fn mk_word_item(name: InternedString) -> P<MetaItem> {
     P(dummy_spanned(MetaWord(name)))
 }
 
-thread_local! { static NEXT_ATTR_ID: Cell<uint> = Cell::new(0) }
+thread_local! { static NEXT_ATTR_ID: Cell<usize> = Cell::new(0) }
 
 pub fn mk_attr_id() -> AttrId {
     let id = NEXT_ATTR_ID.with(|slot| {
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index bf26687deed..a5e10f42750 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -30,8 +30,8 @@ use libc::c_uint;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
 
 pub trait Pos {
-    fn from_uint(n: uint) -> Self;
-    fn to_uint(&self) -> uint;
+    fn from_usize(n: usize) -> Self;
+    fn to_usize(&self) -> usize;
 }
 
 /// A byte offset. Keep this small (currently 32-bits), as AST contains
@@ -43,21 +43,21 @@ pub struct BytePos(pub u32);
 /// is not equivalent to a character offset. The CodeMap will convert BytePos
 /// values to CharPos values as necessary.
 #[derive(Copy, PartialEq, Hash, PartialOrd, Show)]
-pub struct CharPos(pub uint);
+pub struct CharPos(pub usize);
 
 // FIXME: Lots of boilerplate in these impls, but so far my attempts to fix
 // have been unsuccessful
 
 impl Pos for BytePos {
-    fn from_uint(n: uint) -> BytePos { BytePos(n as u32) }
-    fn to_uint(&self) -> uint { let BytePos(n) = *self; n as uint }
+    fn from_usize(n: usize) -> BytePos { BytePos(n as u32) }
+    fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize }
 }
 
 impl Add for BytePos {
     type Output = BytePos;
 
     fn add(self, rhs: BytePos) -> BytePos {
-        BytePos((self.to_uint() + rhs.to_uint()) as u32)
+        BytePos((self.to_usize() + rhs.to_usize()) as u32)
     }
 }
 
@@ -65,20 +65,20 @@ impl Sub for BytePos {
     type Output = BytePos;
 
     fn sub(self, rhs: BytePos) -> BytePos {
-        BytePos((self.to_uint() - rhs.to_uint()) as u32)
+        BytePos((self.to_usize() - rhs.to_usize()) as u32)
     }
 }
 
 impl Pos for CharPos {
-    fn from_uint(n: uint) -> CharPos { CharPos(n) }
-    fn to_uint(&self) -> uint { let CharPos(n) = *self; n }
+    fn from_usize(n: usize) -> CharPos { CharPos(n) }
+    fn to_usize(&self) -> usize { let CharPos(n) = *self; n }
 }
 
 impl Add for CharPos {
     type Output = CharPos;
 
     fn add(self, rhs: CharPos) -> CharPos {
-        CharPos(self.to_uint() + rhs.to_uint())
+        CharPos(self.to_usize() + rhs.to_usize())
     }
 }
 
@@ -86,7 +86,7 @@ impl Sub for CharPos {
     type Output = CharPos;
 
     fn sub(self, rhs: CharPos) -> CharPos {
-        CharPos(self.to_uint() - rhs.to_uint())
+        CharPos(self.to_usize() - rhs.to_usize())
     }
 }
 
@@ -173,7 +173,7 @@ pub struct Loc {
     /// Information about the original source
     pub file: Rc<FileMap>,
     /// The (1-based) line number
-    pub line: uint,
+    pub line: usize,
     /// The (0-based) column offset
     pub col: CharPos
 }
@@ -183,13 +183,13 @@ pub struct Loc {
 // perhaps they should just be removed.
 pub struct LocWithOpt {
     pub filename: FileName,
-    pub line: uint,
+    pub line: usize,
     pub col: CharPos,
     pub file: Option<Rc<FileMap>>,
 }
 
 // used to be structural records. Better names, anyone?
-pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: uint }
+pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: usize }
 pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }
 
 /// The syntax with which a macro was invoked.
@@ -258,7 +258,7 @@ pub type FileName = String;
 
 pub struct FileLines {
     pub file: Rc<FileMap>,
-    pub lines: Vec<uint>
+    pub lines: Vec<usize>
 }
 
 /// Identifies an offset of a multi-byte character in a FileMap
@@ -267,7 +267,7 @@ pub struct MultiByteChar {
     /// The absolute offset of the character in the CodeMap
     pub pos: BytePos,
     /// The number of bytes, >=2
-    pub bytes: uint,
+    pub bytes: usize,
 }
 
 /// A single source in the CodeMap
@@ -306,11 +306,11 @@ impl FileMap {
 
     /// get a line from the list of pre-computed line-beginnings
     ///
-    pub fn get_line(&self, line_number: uint) -> Option<String> {
+    pub fn get_line(&self, line_number: usize) -> Option<String> {
         let lines = self.lines.borrow();
         lines.get(line_number).map(|&line| {
             let begin: BytePos = line - self.start_pos;
-            let begin = begin.to_uint();
+            let begin = begin.to_usize();
             let slice = &self.src[begin..];
             match slice.find('\n') {
                 Some(e) => &slice[..e],
@@ -319,7 +319,7 @@ impl FileMap {
         })
     }
 
-    pub fn record_multibyte_char(&self, pos: BytePos, bytes: uint) {
+    pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
         assert!(bytes >=2 && bytes <= 4);
         let mbc = MultiByteChar {
             pos: pos,
@@ -351,7 +351,7 @@ impl CodeMap {
         let mut files = self.files.borrow_mut();
         let start_pos = match files.last() {
             None => 0,
-            Some(last) => last.start_pos.to_uint() + last.src.len(),
+            Some(last) => last.start_pos.to_usize() + last.src.len(),
         };
 
         // Remove utf-8 BOM if any.
@@ -374,7 +374,7 @@ impl CodeMap {
         let filemap = Rc::new(FileMap {
             name: filename,
             src: src.to_string(),
-            start_pos: Pos::from_uint(start_pos),
+            start_pos: Pos::from_usize(start_pos),
             lines: RefCell::new(Vec::new()),
             multibyte_chars: RefCell::new(Vec::new()),
         });
@@ -389,7 +389,7 @@ impl CodeMap {
         (format!("<{}:{}:{}>",
                  pos.file.name,
                  pos.line,
-                 pos.col.to_uint() + 1)).to_string()
+                 pos.col.to_usize() + 1)).to_string()
     }
 
     /// Lookup source information about a BytePos
@@ -417,9 +417,9 @@ impl CodeMap {
         return (format!("{}:{}:{}: {}:{}",
                         lo.filename,
                         lo.line,
-                        lo.col.to_uint() + 1,
+                        lo.col.to_usize() + 1,
                         hi.line,
-                        hi.col.to_uint() + 1)).to_string()
+                        hi.col.to_usize() + 1)).to_string()
     }
 
     pub fn span_to_filename(&self, sp: Span) -> FileName {
@@ -430,7 +430,7 @@ impl CodeMap {
         let lo = self.lookup_char_pos(sp.lo);
         let hi = self.lookup_char_pos(sp.hi);
         let mut lines = Vec::new();
-        for i in range(lo.line - 1u, hi.line as uint) {
+        for i in range(lo.line - 1us, hi.line as usize) {
             lines.push(i);
         };
         FileLines {file: lo.file, lines: lines}
@@ -447,7 +447,7 @@ impl CodeMap {
         if begin.fm.start_pos != end.fm.start_pos {
             None
         } else {
-            Some((&begin.fm.src[begin.pos.to_uint()..end.pos.to_uint()]).to_string())
+            Some((&begin.fm.src[begin.pos.to_usize()..end.pos.to_usize()]).to_string())
         }
     }
 
@@ -484,24 +484,24 @@ impl CodeMap {
                 total_extra_bytes += mbc.bytes - 1;
                 // We should never see a byte position in the middle of a
                 // character
-                assert!(bpos.to_uint() >= mbc.pos.to_uint() + mbc.bytes);
+                assert!(bpos.to_usize() >= mbc.pos.to_usize() + mbc.bytes);
             } else {
                 break;
             }
         }
 
-        assert!(map.start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
-        CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
+        assert!(map.start_pos.to_usize() + total_extra_bytes <= bpos.to_usize());
+        CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes)
     }
 
-    fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
+    fn lookup_filemap_idx(&self, pos: BytePos) -> usize {
         let files = self.files.borrow();
         let files = &*files;
         let len = files.len();
-        let mut a = 0u;
+        let mut a = 0us;
         let mut b = len;
-        while b - a > 1u {
-            let m = (a + b) / 2u;
+        while b - a > 1us {
+            let m = (a + b) / 2us;
             if files[m].start_pos > pos {
                 b = m;
             } else {
@@ -520,13 +520,13 @@ impl CodeMap {
             }
             if a == 0 {
                 panic!("position {} does not resolve to a source location",
-                      pos.to_uint());
+                      pos.to_usize());
             }
             a -= 1;
         }
         if a >= len {
             panic!("position {} does not resolve to a source location",
-                  pos.to_uint())
+                  pos.to_usize())
         }
 
         return a;
@@ -537,12 +537,12 @@ impl CodeMap {
 
         let files = self.files.borrow();
         let f = (*files)[idx].clone();
-        let mut a = 0u;
+        let mut a = 0us;
         {
             let lines = f.lines.borrow();
             let mut b = lines.len();
-            while b - a > 1u {
-                let m = (a + b) / 2u;
+            while b - a > 1us {
+                let m = (a + b) / 2us;
                 if (*lines)[m] > pos { b = m; } else { a = m; }
             }
         }
@@ -551,7 +551,7 @@ impl CodeMap {
 
     fn lookup_pos(&self, pos: BytePos) -> Loc {
         let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
-        let line = a + 1u; // Line numbers start at 1
+        let line = a + 1us; // Line numbers start at 1
         let chpos = self.bytepos_to_file_charpos(pos);
         let linebpos = (*f.lines.borrow())[a];
         let linechpos = self.bytepos_to_file_charpos(linebpos);
@@ -579,7 +579,7 @@ impl CodeMap {
     {
         match id {
             NO_EXPANSION => f(None),
-            ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as uint]))
+            ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as usize]))
         }
     }
 
@@ -762,7 +762,7 @@ mod test {
 
         assert_eq!(file_lines.file.name, "blork.rs");
         assert_eq!(file_lines.lines.len(), 1);
-        assert_eq!(file_lines.lines[0], 1u);
+        assert_eq!(file_lines.lines[0], 1us);
     }
 
     #[test]
diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs
index b26ec64c24b..c17ad058ada 100644
--- a/src/libsyntax/diagnostic.rs
+++ b/src/libsyntax/diagnostic.rs
@@ -26,7 +26,7 @@ use term::WriterWrapper;
 use term;
 
 /// maximum number of lines we will print for each error; arbitrary.
-static MAX_LINES: uint = 6u;
+static MAX_LINES: usize = 6us;
 
 #[derive(Clone, Copy)]
 pub enum RenderSpan {
@@ -141,7 +141,7 @@ impl SpanHandler {
 /// (fatal, bug, unimpl) may cause immediate exit,
 /// others log errors for later reporting.
 pub struct Handler {
-    err_count: Cell<uint>,
+    err_count: Cell<usize>,
     emit: RefCell<Box<Emitter + Send>>,
 }
 
@@ -155,20 +155,20 @@ impl Handler {
         self.bump_err_count();
     }
     pub fn bump_err_count(&self) {
-        self.err_count.set(self.err_count.get() + 1u);
+        self.err_count.set(self.err_count.get() + 1us);
     }
-    pub fn err_count(&self) -> uint {
+    pub fn err_count(&self) -> usize {
         self.err_count.get()
     }
     pub fn has_errors(&self) -> bool {
-        self.err_count.get()> 0u
+        self.err_count.get() > 0us
     }
     pub fn abort_if_errors(&self) {
         let s;
         match self.err_count.get() {
-          0u => return,
-          1u => s = "aborting due to previous error".to_string(),
-          _  => {
+          0us => return,
+          1us => s = "aborting due to previous error".to_string(),
+          _   => {
             s = format!("aborting due to {} previous errors",
                         self.err_count.get());
           }
@@ -452,7 +452,7 @@ fn highlight_lines(err: &mut EmitterWriter,
     let mut elided = false;
     let mut display_lines = &lines.lines[];
     if display_lines.len() > MAX_LINES {
-        display_lines = &display_lines[0u..MAX_LINES];
+        display_lines = &display_lines[0us..MAX_LINES];
         elided = true;
     }
     // Print the offending lines
@@ -463,32 +463,32 @@ fn highlight_lines(err: &mut EmitterWriter,
         }
     }
     if elided {
-        let last_line = display_lines[display_lines.len() - 1u];
-        let s = format!("{}:{} ", fm.name, last_line + 1u);
+        let last_line = display_lines[display_lines.len() - 1us];
+        let s = format!("{}:{} ", fm.name, last_line + 1us);
         try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
     }
 
     // FIXME (#3260)
     // If there's one line at fault we can easily point to the problem
-    if lines.lines.len() == 1u {
+    if lines.lines.len() == 1us {
         let lo = cm.lookup_char_pos(sp.lo);
-        let mut digits = 0u;
-        let mut num = (lines.lines[0] + 1u) / 10u;
+        let mut digits = 0us;
+        let mut num = (lines.lines[0] + 1us) / 10us;
 
         // how many digits must be indent past?
-        while num > 0u { num /= 10u; digits += 1u; }
+        while num > 0us { num /= 10us; digits += 1us; }
 
         // indent past |name:## | and the 0-offset column location
-        let left = fm.name.len() + digits + lo.col.to_uint() + 3u;
+        let left = fm.name.len() + digits + lo.col.to_usize() + 3us;
         let mut s = String::new();
         // Skip is the number of characters we need to skip because they are
         // part of the 'filename:line ' part of the previous line.
-        let skip = fm.name.len() + digits + 3u;
+        let skip = fm.name.len() + digits + 3us;
         for _ in range(0, skip) {
             s.push(' ');
         }
         if let Some(orig) = fm.get_line(lines.lines[0]) {
-            for pos in range(0u, left - skip) {
+            for pos in range(0us, left - skip) {
                 let cur_char = orig.as_bytes()[pos] as char;
                 // Whenever a tab occurs on the previous line, we insert one on
                 // the error-point-squiggly-line as well (instead of a space).
@@ -506,7 +506,7 @@ fn highlight_lines(err: &mut EmitterWriter,
         let hi = cm.lookup_char_pos(sp.hi);
         if hi.col != lo.col {
             // the ^ already takes up one space
-            let num_squigglies = hi.col.to_uint() - lo.col.to_uint() - 1u;
+            let num_squigglies = hi.col.to_usize() - lo.col.to_usize() - 1us;
             for _ in range(0, num_squigglies) {
                 s.push('~');
             }
@@ -555,7 +555,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter,
     let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1]+1);
     let hi = cm.lookup_char_pos(sp.hi);
     // Span seems to use half-opened interval, so subtract 1
-    let skip = last_line_start.len() + hi.col.to_uint() - 1;
+    let skip = last_line_start.len() + hi.col.to_usize() - 1;
     let mut s = String::new();
     for _ in range(0, skip) {
         s.push(' ');
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index f2498abfa6a..9128bc05f6f 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -548,7 +548,7 @@ pub struct ExtCtxt<'a> {
     pub exported_macros: Vec<ast::MacroDef>,
 
     pub syntax_env: SyntaxEnv,
-    pub recursion_count: uint,
+    pub recursion_count: usize,
 }
 
 impl<'a> ExtCtxt<'a> {
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index c34142aec39..8773c0f2f79 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -116,7 +116,7 @@ pub trait AstBuilder {
     fn expr_mut_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>;
     fn expr_field_access(&self, span: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr>;
     fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>,
-                             idx: uint) -> P<ast::Expr>;
+                             idx: usize) -> P<ast::Expr>;
     fn expr_call(&self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
     fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
     fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident>,
@@ -134,8 +134,8 @@ pub trait AstBuilder {
 
     fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr>;
 
-    fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr>;
-    fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr>;
+    fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr>;
+    fn expr_int(&self, sp: Span, i: isize) -> P<ast::Expr>;
     fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr>;
     fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr>;
 
@@ -579,7 +579,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
         let field_name = token::get_ident(ident);
         let field_span = Span {
-            lo: sp.lo - Pos::from_uint(field_name.get().len()),
+            lo: sp.lo - Pos::from_usize(field_name.get().len()),
             hi: sp.hi,
             expn_id: sp.expn_id,
         };
@@ -587,9 +587,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         let id = Spanned { node: ident, span: field_span };
         self.expr(sp, ast::ExprField(expr, id))
     }
-    fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: uint) -> P<ast::Expr> {
+    fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: usize) -> P<ast::Expr> {
         let field_span = Span {
-            lo: sp.lo - Pos::from_uint(idx.to_string().len()),
+            lo: sp.lo - Pos::from_usize(idx.to_string().len()),
             hi: sp.hi,
             expn_id: sp.expn_id,
         };
@@ -641,10 +641,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr> {
         self.expr(sp, ast::ExprLit(P(respan(sp, lit))))
     }
-    fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr> {
+    fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
         self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyUs(false))))
     }
-    fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr> {
+    fn expr_int(&self, sp: Span, i: isize) -> P<ast::Expr> {
         self.expr_lit(sp, ast::LitInt(i as u64, ast::SignedIntLit(ast::TyIs(false),
                                                                   ast::Sign::new(i))))
     }
@@ -710,7 +710,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         let loc = self.codemap().lookup_char_pos(span.lo);
         let expr_file = self.expr_str(span,
                                       token::intern_and_get_ident(&loc.file.name[]));
-        let expr_line = self.expr_uint(span, loc.line);
+        let expr_line = self.expr_usize(span, loc.line);
         let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
         let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
         self.expr_call_global(
diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs
index 8edbf018f22..e458bd58e8b 100644
--- a/src/libsyntax/ext/deriving/decodable.rs
+++ b/src/libsyntax/ext/deriving/decodable.rs
@@ -114,7 +114,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                 cx.expr_try(span,
                     cx.expr_method_call(span, blkdecoder.clone(), read_struct_field,
                                         vec!(cx.expr_str(span, name),
-                                          cx.expr_uint(span, field),
+                                          cx.expr_usize(span, field),
                                           exprdecode.clone())))
             });
             let result = cx.expr_ok(trait_span, result);
@@ -123,7 +123,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                                 cx.ident_of("read_struct"),
                                 vec!(
                 cx.expr_str(trait_span, token::get_ident(substr.type_ident)),
-                cx.expr_uint(trait_span, nfields),
+                cx.expr_usize(trait_span, nfields),
                 cx.lambda_expr_1(trait_span, result, blkarg)
             ))
         }
@@ -143,14 +143,14 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                                                    path,
                                                    parts,
                                                    |cx, span, _, field| {
-                    let idx = cx.expr_uint(span, field);
+                    let idx = cx.expr_usize(span, field);
                     cx.expr_try(span,
                         cx.expr_method_call(span, blkdecoder.clone(), rvariant_arg,
                                             vec!(idx, exprdecode.clone())))
                 });
 
                 arms.push(cx.arm(v_span,
-                                 vec!(cx.pat_lit(v_span, cx.expr_uint(v_span, i))),
+                                 vec!(cx.pat_lit(v_span, cx.expr_usize(v_span, i))),
                                  decoded));
             }
 
@@ -179,14 +179,14 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
 
 /// Create a decoder for a single enum variant/struct:
 /// - `outer_pat_path` is the path to this enum variant/struct
-/// - `getarg` should retrieve the `uint`-th field with name `@str`.
+/// - `getarg` should retrieve the `usize`-th field with name `@str`.
 fn decode_static_fields<F>(cx: &mut ExtCtxt,
                            trait_span: Span,
                            outer_pat_path: ast::Path,
                            fields: &StaticFields,
                            mut getarg: F)
                            -> P<Expr> where
-    F: FnMut(&mut ExtCtxt, Span, InternedString, uint) -> P<Expr>,
+    F: FnMut(&mut ExtCtxt, Span, InternedString, usize) -> P<Expr>,
 {
     match *fields {
         Unnamed(ref fields) => {
diff --git a/src/libsyntax/ext/deriving/default.rs b/src/libsyntax/ext/deriving/default.rs
index 36c3f2c0ccb..f8fdd8575bd 100644
--- a/src/libsyntax/ext/deriving/default.rs
+++ b/src/libsyntax/ext/deriving/default.rs
@@ -79,7 +79,7 @@ fn default_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructur
         StaticEnum(..) => {
             cx.span_err(trait_span, "`Default` cannot be derived for enums, only structs");
             // let compilation continue
-            cx.expr_uint(trait_span, 0)
+            cx.expr_usize(trait_span, 0)
         }
         _ => cx.span_bug(trait_span, "Non-static method in `derive(Default)`")
     };
diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs
index 801ae213a7b..4c78a7b6a0c 100644
--- a/src/libsyntax/ext/deriving/encodable.rs
+++ b/src/libsyntax/ext/deriving/encodable.rs
@@ -16,7 +16,7 @@
 //!
 //! ```ignore
 //! #[derive(Encodable, Decodable)]
-//! struct Node { id: uint }
+//! struct Node { id: usize }
 //! ```
 //!
 //! would generate two implementations like:
@@ -27,7 +27,7 @@
 //!         s.emit_struct("Node", 1, |this| {
 //!             this.emit_struct_field("id", 0, |this| {
 //!                 Encodable::encode(&self.id, this)
-//!                 /* this.emit_uint(self.id) can also be used */
+//!                 /* this.emit_usize(self.id) can also be used */
 //!             })
 //!         })
 //!     }
@@ -192,7 +192,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                 let call = cx.expr_method_call(span, blkencoder.clone(),
                                                emit_struct_field,
                                                vec!(cx.expr_str(span, name),
-                                                 cx.expr_uint(span, i),
+                                                 cx.expr_usize(span, i),
                                                  lambda));
 
                 // last call doesn't need a try!
@@ -218,7 +218,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                                 cx.ident_of("emit_struct"),
                                 vec!(
                 cx.expr_str(trait_span, token::get_ident(substr.type_ident)),
-                cx.expr_uint(trait_span, fields.len()),
+                cx.expr_usize(trait_span, fields.len()),
                 blk
             ))
         }
@@ -239,7 +239,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                 let lambda = cx.lambda_expr_1(span, enc, blkarg);
                 let call = cx.expr_method_call(span, blkencoder.clone(),
                                                emit_variant_arg,
-                                               vec!(cx.expr_uint(span, i),
+                                               vec!(cx.expr_usize(span, i),
                                                  lambda));
                 let call = if i != last {
                     cx.expr_try(span, call)
@@ -262,8 +262,8 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
             let call = cx.expr_method_call(trait_span, blkencoder,
                                            cx.ident_of("emit_enum_variant"),
                                            vec!(name,
-                                             cx.expr_uint(trait_span, idx),
-                                             cx.expr_uint(trait_span, fields.len()),
+                                             cx.expr_usize(trait_span, idx),
+                                             cx.expr_usize(trait_span, fields.len()),
                                              blk));
             let blk = cx.lambda_expr_1(trait_span, call, blkarg);
             let ret = cx.expr_method_call(trait_span,
diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs
index 293e4befd3b..583f316289f 100644
--- a/src/libsyntax/ext/deriving/generic/mod.rs
+++ b/src/libsyntax/ext/deriving/generic/mod.rs
@@ -28,7 +28,7 @@
 //! arguments:
 //!
 //! - `Struct`, when `Self` is a struct (including tuple structs, e.g
-//!   `struct T(int, char)`).
+//!   `struct T(i32, char)`).
 //! - `EnumMatching`, when `Self` is an enum and all the arguments are the
 //!   same variant of the enum (e.g. `Some(1)`, `Some(3)` and `Some(4)`)
 //! - `EnumNonMatchingCollapsed` when `Self` is an enum and the arguments
@@ -54,17 +54,17 @@
 //! following snippet
 //!
 //! ```rust
-//! struct A { x : int }
+//! struct A { x : i32 }
 //!
-//! struct B(int);
+//! struct B(i32);
 //!
 //! enum C {
-//!     C0(int),
-//!     C1 { x: int }
+//!     C0(i32),
+//!     C1 { x: i32 }
 //! }
 //! ```
 //!
-//! The `int`s in `B` and `C0` don't have an identifier, so the
+//! The `i32`s in `B` and `C0` don't have an identifier, so the
 //! `Option<ident>`s would be `None` for them.
 //!
 //! In the static cases, the structure is summarised, either into the just
@@ -90,8 +90,8 @@
 //! trait PartialEq {
 //!     fn eq(&self, other: &Self);
 //! }
-//! impl PartialEq for int {
-//!     fn eq(&self, other: &int) -> bool {
+//! impl PartialEq for i32 {
+//!     fn eq(&self, other: &i32) -> bool {
 //!         *self == *other
 //!     }
 //! }
@@ -117,7 +117,7 @@
 //!
 //! ```{.text}
 //! Struct(vec![FieldInfo {
-//!           span: <span of `int`>,
+//!           span: <span of `i32`>,
 //!           name: None,
 //!           self_: <expr for &a>
 //!           other: vec![<expr for &b>]
@@ -132,7 +132,7 @@
 //! ```{.text}
 //! EnumMatching(0, <ast::Variant for C0>,
 //!              vec![FieldInfo {
-//!                 span: <span of int>
+//!                 span: <span of i32>
 //!                 name: None,
 //!                 self_: <expr for &a>,
 //!                 other: vec![<expr for &b>]
@@ -179,7 +179,7 @@
 //! StaticStruct(<ast::StructDef of B>, Unnamed(vec![<span of x>]))
 //!
 //! StaticEnum(<ast::EnumDef of C>,
-//!            vec![(<ident of C0>, <span of C0>, Unnamed(vec![<span of int>])),
+//!            vec![(<ident of C0>, <span of C0>, Unnamed(vec![<span of i32>])),
 //!                 (<ident of C1>, <span of C1>, Named(vec![(<ident of x>, <span of x>)]))])
 //! ```
 
@@ -294,7 +294,7 @@ pub enum SubstructureFields<'a> {
     /// Matching variants of the enum: variant index, ast::Variant,
     /// fields: the field name is only non-`None` in the case of a struct
     /// variant.
-    EnumMatching(uint, &'a ast::Variant, Vec<FieldInfo>),
+    EnumMatching(usize, &'a ast::Variant, Vec<FieldInfo>),
 
     /// Non-matching variants of the enum, but with all state hidden from
     /// the consequent code.  The first component holds `Ident`s for all of
@@ -719,7 +719,7 @@ impl<'a> MethodDef<'a> {
 
     /// ```
     /// #[derive(PartialEq)]
-    /// struct A { x: int, y: int }
+    /// struct A { x: i32, y: i32 }
     ///
     /// // equivalent to:
     /// impl PartialEq for A {
@@ -748,7 +748,7 @@ impl<'a> MethodDef<'a> {
         let mut raw_fields = Vec::new(); // ~[[fields of self],
                                  // [fields of next Self arg], [etc]]
         let mut patterns = Vec::new();
-        for i in range(0u, self_args.len()) {
+        for i in range(0us, self_args.len()) {
             let struct_path= cx.path(DUMMY_SP, vec!( type_ident ));
             let (pat, ident_expr) =
                 trait_.create_struct_pattern(cx,
@@ -825,7 +825,7 @@ impl<'a> MethodDef<'a> {
     /// #[derive(PartialEq)]
     /// enum A {
     ///     A1,
-    ///     A2(int)
+    ///     A2(i32)
     /// }
     ///
     /// // is equivalent to
@@ -837,8 +837,8 @@ impl<'a> MethodDef<'a> {
     ///             (&A2(ref __self_0),
     ///              &A2(ref __arg_1_0)) => (*__self_0).eq(&(*__arg_1_0)),
     ///             _ => {
-    ///                 let __self_vi = match *self { A1(..) => 0u, A2(..) => 1u };
-    ///                 let __arg_1_vi = match *__arg_1 { A1(..) => 0u, A2(..) => 1u };
+    ///                 let __self_vi = match *self { A1(..) => 0us, A2(..) => 1us };
+    ///                 let __arg_1_vi = match *__arg_1 { A1(..) => 0us, A2(..) => 1us };
     ///                 false
     ///             }
     ///         }
@@ -882,8 +882,8 @@ impl<'a> MethodDef<'a> {
     ///   (Variant2, Variant2, Variant2) => ... // delegate Matching on Variant2
     ///   ...
     ///   _ => {
-    ///     let __this_vi = match this { Variant1 => 0u, Variant2 => 1u, ... };
-    ///     let __that_vi = match that { Variant1 => 0u, Variant2 => 1u, ... };
+    ///     let __this_vi = match this { Variant1 => 0us, Variant2 => 1us, ... };
+    ///     let __that_vi = match that { Variant1 => 0us, Variant2 => 1us, ... };
     ///     ... // catch-all remainder can inspect above variant index values.
     ///   }
     /// }
@@ -915,7 +915,7 @@ impl<'a> MethodDef<'a> {
             .collect::<Vec<ast::Ident>>();
 
         // The `vi_idents` will be bound, solely in the catch-all, to
-        // a series of let statements mapping each self_arg to a uint
+        // a series of let statements mapping each self_arg to a usize
         // corresponding to its variant index.
         let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
             .map(|name| { let vi_suffix = format!("{}_vi", &name[]);
@@ -1039,19 +1039,19 @@ impl<'a> MethodDef<'a> {
                 }).collect();
 
             // Build a series of let statements mapping each self_arg
-            // to a uint corresponding to its variant index.
+            // to a usize corresponding to its variant index.
             // i.e. for `enum E<T> { A, B(1), C(T, T) }`, and a deriving
             // with three Self args, builds three statements:
             //
             // ```
             // let __self0_vi = match   self {
-            //     A => 0u, B(..) => 1u, C(..) => 2u
+            //     A => 0us, B(..) => 1us, C(..) => 2us
             // };
             // let __self1_vi = match __arg1 {
-            //     A => 0u, B(..) => 1u, C(..) => 2u
+            //     A => 0us, B(..) => 1us, C(..) => 2us
             // };
             // let __self2_vi = match __arg2 {
-            //     A => 0u, B(..) => 1u, C(..) => 2u
+            //     A => 0us, B(..) => 1us, C(..) => 2us
             // };
             // ```
             let mut index_let_stmts: Vec<P<ast::Stmt>> = Vec::new();
diff --git a/src/libsyntax/ext/deriving/generic/ty.rs b/src/libsyntax/ext/deriving/generic/ty.rs
index 5e6a9c91ce0..85682d41b5f 100644
--- a/src/libsyntax/ext/deriving/generic/ty.rs
+++ b/src/libsyntax/ext/deriving/generic/ty.rs
@@ -32,7 +32,7 @@ pub enum PtrTy<'a> {
     Raw(ast::Mutability),
 }
 
-/// A path, e.g. `::std::option::Option::<int>` (global). Has support
+/// A path, e.g. `::std::option::Option::<i32>` (global). Has support
 /// for type parameters and a lifetime.
 #[derive(Clone)]
 pub struct Path<'a> {
@@ -91,7 +91,7 @@ pub enum Ty<'a> {
     /// &/Box/ Ty
     Ptr(Box<Ty<'a>>, PtrTy<'a>),
     /// mod::mod::Type<[lifetime], [Params...]>, including a plain type
-    /// parameter, and things like `int`
+    /// parameter, and things like `i32`
     Literal(Path<'a>),
     /// includes unit
     Tuple(Vec<Ty<'a>> )
diff --git a/src/libsyntax/ext/deriving/hash.rs b/src/libsyntax/ext/deriving/hash.rs
index 08336be87d1..8dac864c2ae 100644
--- a/src/libsyntax/ext/deriving/hash.rs
+++ b/src/libsyntax/ext/deriving/hash.rs
@@ -89,7 +89,7 @@ fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
             // iteration function.
             let discriminant = match variant.node.disr_expr {
                 Some(ref d) => d.clone(),
-                None => cx.expr_uint(trait_span, index)
+                None => cx.expr_usize(trait_span, index)
             };
 
             stmts.push(call_hash(trait_span, discriminant));
diff --git a/src/libsyntax/ext/deriving/rand.rs b/src/libsyntax/ext/deriving/rand.rs
index b5435896791..bb902d7059c 100644
--- a/src/libsyntax/ext/deriving/rand.rs
+++ b/src/libsyntax/ext/deriving/rand.rs
@@ -80,10 +80,10 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
             if variants.is_empty() {
                 cx.span_err(trait_span, "`Rand` cannot be derived for enums with no variants");
                 // let compilation continue
-                return cx.expr_uint(trait_span, 0);
+                return cx.expr_usize(trait_span, 0);
             }
 
-            let variant_count = cx.expr_uint(trait_span, variants.len());
+            let variant_count = cx.expr_usize(trait_span, variants.len());
 
             let rand_name = cx.path_all(trait_span,
                                         true,
@@ -115,7 +115,7 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
                                               variant_count);
 
             let mut arms = variants.iter().enumerate().map(|(i, &(ident, v_span, ref summary))| {
-                let i_expr = cx.expr_uint(v_span, i);
+                let i_expr = cx.expr_usize(v_span, i);
                 let pat = cx.pat_lit(v_span, i_expr);
 
                 let path = cx.path(v_span, vec![substr.type_ident, ident]);
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index 9b54e259761..9f6bf352b04 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -104,7 +104,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     let e = match os::getenv(var.get()) {
         None => {
             cx.span_err(sp, msg.get());
-            cx.expr_uint(sp, 0)
+            cx.expr_usize(sp, 0)
         }
         Some(s) => cx.expr_str(sp, token::intern_and_get_ident(&s[]))
     };
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index c95bdeefd45..9aa602b39b4 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -273,7 +273,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
         // in this file.
         // Token-tree macros:
         MacInvocTT(pth, tts, _) => {
-            if pth.segments.len() > 1u {
+            if pth.segments.len() > 1us {
                 fld.cx.span_err(pth.span,
                                 "expected macro name without module \
                                 separators");
@@ -844,7 +844,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
             },
             _ => unreachable!()
         };
-        if pth.segments.len() > 1u {
+        if pth.segments.len() > 1us {
             fld.cx.span_err(pth.span, "expected macro name without module separators");
             return DummyResult::raw_pat(span);
         }
@@ -1311,7 +1311,7 @@ fn new_span(cx: &ExtCtxt, sp: Span) -> Span {
 pub struct ExpansionConfig {
     pub crate_name: String,
     pub enable_quotes: bool,
-    pub recursion_limit: uint,
+    pub recursion_limit: usize,
 }
 
 impl ExpansionConfig {
@@ -1507,7 +1507,7 @@ mod test {
     #[should_fail]
     #[test] fn macros_cant_escape_fns_test () {
         let src = "fn bogus() {macro_rules! z (() => (3+4));}\
-                   fn inty() -> int { z!() }".to_string();
+                   fn inty() -> i32 { z!() }".to_string();
         let sess = parse::new_parse_sess();
         let crate_ast = parse::parse_crate_from_source_str(
             "<test>".to_string(),
@@ -1521,7 +1521,7 @@ mod test {
     #[should_fail]
     #[test] fn macros_cant_escape_mods_test () {
         let src = "mod foo {macro_rules! z (() => (3+4));}\
-                   fn inty() -> int { z!() }".to_string();
+                   fn inty() -> i32 { z!() }".to_string();
         let sess = parse::new_parse_sess();
         let crate_ast = parse::parse_crate_from_source_str(
             "<test>".to_string(),
@@ -1533,7 +1533,7 @@ mod test {
     // macro_use modules should allow macros to escape
     #[test] fn macros_can_escape_flattened_mods_test () {
         let src = "#[macro_use] mod foo {macro_rules! z (() => (3+4));}\
-                   fn inty() -> int { z!() }".to_string();
+                   fn inty() -> i32 { z!() }".to_string();
         let sess = parse::new_parse_sess();
         let crate_ast = parse::parse_crate_from_source_str(
             "<test>".to_string(),
@@ -1564,8 +1564,8 @@ mod test {
     // should be able to use a bound identifier as a literal in a macro definition:
     #[test] fn self_macro_parsing(){
         expand_crate_str(
-            "macro_rules! foo ((zz) => (287u;));
-            fn f(zz : int) {foo!(zz);}".to_string()
+            "macro_rules! foo ((zz) => (287;));
+            fn f(zz: i32) {foo!(zz);}".to_string()
             );
     }
 
@@ -1595,29 +1595,29 @@ mod test {
     // in principle, you might want to control this boolean on a per-varref basis,
     // but that would make things even harder to understand, and might not be
     // necessary for thorough testing.
-    type RenamingTest = (&'static str, Vec<Vec<uint>>, bool);
+    type RenamingTest = (&'static str, Vec<Vec<usize>>, bool);
 
     #[test]
     fn automatic_renaming () {
         let tests: Vec<RenamingTest> =
             vec!(// b & c should get new names throughout, in the expr too:
-                ("fn a() -> int { let b = 13; let c = b; b+c }",
+                ("fn a() -> i32 { let b = 13; let c = b; b+c }",
                  vec!(vec!(0,1),vec!(2)), false),
                 // both x's should be renamed (how is this causing a bug?)
-                ("fn main () {let x: int = 13;x;}",
+                ("fn main () {let x: i32 = 13;x;}",
                  vec!(vec!(0)), false),
                 // the use of b after the + should be renamed, the other one not:
-                ("macro_rules! f (($x:ident) => (b + $x)); fn a() -> int { let b = 13; f!(b)}",
+                ("macro_rules! f (($x:ident) => (b + $x)); fn a() -> i32 { let b = 13; f!(b)}",
                  vec!(vec!(1)), false),
                 // the b before the plus should not be renamed (requires marks)
-                ("macro_rules! f (($x:ident) => ({let b=9; ($x + b)})); fn a() -> int { f!(b)}",
+                ("macro_rules! f (($x:ident) => ({let b=9; ($x + b)})); fn a() -> i32 { f!(b)}",
                  vec!(vec!(1)), false),
                 // the marks going in and out of letty should cancel, allowing that $x to
                 // capture the one following the semicolon.
                 // this was an awesome test case, and caught a *lot* of bugs.
                 ("macro_rules! letty(($x:ident) => (let $x = 15;));
                   macro_rules! user(($x:ident) => ({letty!($x); $x}));
-                  fn main() -> int {user!(z)}",
+                  fn main() -> i32 {user!(z)}",
                  vec!(vec!(0)), false)
                 );
         for (idx,s) in tests.iter().enumerate() {
@@ -1680,13 +1680,13 @@ mod test {
     // can't write this test case until we have macro-generating macros.
 
     // method arg hygiene
-    // method expands to fn get_x(&self_0, x_1:int) {self_0 + self_2 + x_3 + x_1}
+    // method expands to fn get_x(&self_0, x_1: i32) {self_0 + self_2 + x_3 + x_1}
     #[test] fn method_arg_hygiene(){
         run_renaming_test(
             &("macro_rules! inject_x (()=>(x));
               macro_rules! inject_self (()=>(self));
               struct A;
-              impl A{fn get_x(&self, x: int) {self + inject_self!() + inject_x!() + x;} }",
+              impl A{fn get_x(&self, x: i32) {self + inject_self!() + inject_x!() + x;} }",
               vec!(vec!(0),vec!(3)),
               true),
             0)
@@ -1706,21 +1706,21 @@ mod test {
     }
 
     // item fn hygiene
-    // expands to fn q(x_1:int){fn g(x_2:int){x_2 + x_1};}
+    // expands to fn q(x_1: i32){fn g(x_2: i32){x_2 + x_1};}
     #[test] fn issue_9383(){
         run_renaming_test(
-            &("macro_rules! bad_macro (($ex:expr) => (fn g(x:int){ x + $ex }));
-              fn q(x:int) { bad_macro!(x); }",
+            &("macro_rules! bad_macro (($ex:expr) => (fn g(x: i32){ x + $ex }));
+              fn q(x: i32) { bad_macro!(x); }",
               vec!(vec!(1),vec!(0)),true),
             0)
     }
 
     // closure arg hygiene (ExprClosure)
-    // expands to fn f(){(|x_1 : int| {(x_2 + x_1)})(3);}
+    // expands to fn f(){(|x_1 : i32| {(x_2 + x_1)})(3);}
     #[test] fn closure_arg_hygiene(){
         run_renaming_test(
             &("macro_rules! inject_x (()=>(x));
-            fn f(){(|x : int| {(inject_x!() + x)})(3);}",
+            fn f(){(|x : i32| {(inject_x!() + x)})(3);}",
               vec!(vec!(1)),
               true),
             0)
@@ -1729,7 +1729,7 @@ mod test {
     // macro_rules in method position. Sadly, unimplemented.
     #[test] fn macro_in_method_posn(){
         expand_crate_str(
-            "macro_rules! my_method (() => (fn thirteen(&self) -> int {13}));
+            "macro_rules! my_method (() => (fn thirteen(&self) -> i32 {13}));
             struct A;
             impl A{ my_method!(); }
             fn f(){A.thirteen;}".to_string());
@@ -1749,7 +1749,7 @@ mod test {
     }
 
     // run one of the renaming tests
-    fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
+    fn run_renaming_test(t: &RenamingTest, test_idx: usize) {
         let invalid_name = token::special_idents::invalid.name;
         let (teststr, bound_connections, bound_ident_check) = match *t {
             (ref str,ref conns, bic) => (str.to_string(), conns.clone(), bic)
@@ -1876,7 +1876,7 @@ foo_module!();
     // it's the name of a 0-ary variant, and that 'i' appears twice in succession.
     #[test]
     fn crate_bindings_test(){
-        let the_crate = string_to_crate("fn main (a : int) -> int {|b| {
+        let the_crate = string_to_crate("fn main (a: i32) -> i32 {|b| {
         match 34 {None => 3, Some(i) | i => j, Foo{k:z,l:y} => \"banana\"}} }".to_string());
         let idents = crate_bindings(&the_crate);
         assert_eq!(idents, strs_to_idents(vec!("a","b","None","i","i","z","y")));
@@ -1885,10 +1885,10 @@ foo_module!();
     // test the IdentRenamer directly
     #[test]
     fn ident_renamer_test () {
-        let the_crate = string_to_crate("fn f(x : int){let x = x; x}".to_string());
+        let the_crate = string_to_crate("fn f(x: i32){let x = x; x}".to_string());
         let f_ident = token::str_to_ident("f");
         let x_ident = token::str_to_ident("x");
-        let int_ident = token::str_to_ident("int");
+        let int_ident = token::str_to_ident("i32");
         let renames = vec!((x_ident,Name(16)));
         let mut renamer = IdentRenamer{renames: &renames};
         let renamed_crate = renamer.fold_crate(the_crate);
@@ -1900,10 +1900,10 @@ foo_module!();
     // test the PatIdentRenamer; only PatIdents get renamed
     #[test]
     fn pat_ident_renamer_test () {
-        let the_crate = string_to_crate("fn f(x : int){let x = x; x}".to_string());
+        let the_crate = string_to_crate("fn f(x: i32){let x = x; x}".to_string());
         let f_ident = token::str_to_ident("f");
         let x_ident = token::str_to_ident("x");
-        let int_ident = token::str_to_ident("int");
+        let int_ident = token::str_to_ident("i32");
         let renames = vec!((x_ident,Name(16)));
         let mut renamer = PatIdentRenamer{renames: &renames};
         let renamed_crate = renamer.fold_crate(the_crate);
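
The renaming tests above record which identifier occurrences must share a fresh name after expansion. A small present-day sketch of the property under test, namely that a binding introduced inside a macro cannot capture a call-site identifier:

macro_rules! add_b {
    ($x:ident) => {{
        let b = 9; // macro-internal `b`, renamed away by hygiene
        $x + b
    }};
}

fn main() {
    let b = 13;          // caller's `b`
    let sum = add_b!(b); // the two `b`s stay distinct after expansion
    assert_eq!(sum, 22); // 13 (caller's b) + 9 (macro's b)
    println!("{}", sum);
}
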
diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs
index f512b33f024..068c7ec0867 100644
--- a/src/libsyntax/ext/format.rs
+++ b/src/libsyntax/ext/format.rs
@@ -31,7 +31,7 @@ enum ArgumentType {
 }
 
 enum Position {
-    Exact(uint),
+    Exact(usize),
     Named(String),
 }
 
@@ -61,11 +61,11 @@ struct Context<'a, 'b:'a> {
     /// Stays `true` if all formatting parameters are default (as in "{}{}").
     all_pieces_simple: bool,
 
-    name_positions: HashMap<String, uint>,
+    name_positions: HashMap<String, usize>,
 
     /// Updated as arguments are consumed or methods are entered
-    nest_level: uint,
-    next_arg: uint,
+    nest_level: usize,
+    next_arg: usize,
 }
 
 /// Parses the arguments from the given list of tokens, returning None
@@ -326,11 +326,11 @@ impl<'a, 'b> Context<'a, 'b> {
         match c {
             parse::CountIs(i) => {
                 self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "CountIs"),
-                                          vec!(self.ecx.expr_uint(sp, i)))
+                                          vec!(self.ecx.expr_usize(sp, i)))
             }
             parse::CountIsParam(i) => {
                 self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "CountIsParam"),
-                                          vec!(self.ecx.expr_uint(sp, i)))
+                                          vec!(self.ecx.expr_usize(sp, i)))
             }
             parse::CountImplied => {
                 let path = self.ecx.path_global(sp, Context::rtpath(self.ecx,
@@ -349,7 +349,7 @@ impl<'a, 'b> Context<'a, 'b> {
                 };
                 let i = i + self.args.len();
                 self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "CountIsParam"),
-                                          vec!(self.ecx.expr_uint(sp, i)))
+                                          vec!(self.ecx.expr_usize(sp, i)))
             }
         }
     }
@@ -382,7 +382,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     }
                     parse::ArgumentIs(i) => {
                         self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "ArgumentIs"),
-                                                  vec!(self.ecx.expr_uint(sp, i)))
+                                                  vec!(self.ecx.expr_usize(sp, i)))
                     }
                     // Named arguments are converted to positional arguments at
                     // the end of the list of arguments
@@ -393,7 +393,7 @@ impl<'a, 'b> Context<'a, 'b> {
                         };
                         let i = i + self.args.len();
                         self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "ArgumentIs"),
-                                                  vec!(self.ecx.expr_uint(sp, i)))
+                                                  vec!(self.ecx.expr_usize(sp, i)))
                     }
                 };
 
@@ -432,7 +432,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     }
                 };
                 let align = self.ecx.expr_path(align);
-                let flags = self.ecx.expr_uint(sp, arg.format.flags);
+                let flags = self.ecx.expr_usize(sp, arg.format.flags);
                 let prec = self.trans_count(arg.format.precision);
                 let width = self.trans_count(arg.format.width);
                 let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "FormatSpec"));
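
The `i + self.args.len()` expressions above are how a named format argument gets assigned a positional slot after all regular arguments. A standalone sketch of that mapping (function and map names here are illustrative, not the compiler's):

use std::collections::HashMap;

// A named argument's final position is its index among the named arguments
// offset by the number of positional arguments, as in the expansion above.
fn resolve_named(
    name: &str,
    positional_count: usize,
    name_positions: &HashMap<String, usize>,
) -> Option<usize> {
    name_positions.get(name).map(|&i| i + positional_count)
}

fn main() {
    let mut names = HashMap::new();
    names.insert("width".to_string(), 0);
    names.insert("prec".to_string(), 1);

    // With two positional arguments, "prec" ends up in slot 3.
    assert_eq!(resolve_named("prec", 2, &names), Some(3));
    println!("ok");
}
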
diff --git a/src/libsyntax/ext/mtwt.rs b/src/libsyntax/ext/mtwt.rs
index ae8ff118fcc..7adc443759f 100644
--- a/src/libsyntax/ext/mtwt.rs
+++ b/src/libsyntax/ext/mtwt.rs
@@ -187,7 +187,7 @@ fn resolve_internal(id: Ident,
     }
 
     let resolved = {
-        let result = (*table.table.borrow())[id.ctxt as uint];
+        let result = (*table.table.borrow())[id.ctxt as usize];
         match result {
             EmptyCtxt => id.name,
             // ignore marks here:
@@ -231,7 +231,7 @@ fn marksof_internal(ctxt: SyntaxContext,
     let mut result = Vec::new();
     let mut loopvar = ctxt;
     loop {
-        let table_entry = (*table.table.borrow())[loopvar as uint];
+        let table_entry = (*table.table.borrow())[loopvar as usize];
         match table_entry {
             EmptyCtxt => {
                 return result;
@@ -258,7 +258,7 @@ fn marksof_internal(ctxt: SyntaxContext,
 /// FAILS when outside is not a mark.
 pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
     with_sctable(|sctable| {
-        match (*sctable.table.borrow())[ctxt as uint] {
+        match (*sctable.table.borrow())[ctxt as usize] {
             Mark(mrk, _) => mrk,
             _ => panic!("can't retrieve outer mark when outside is not a mark")
         }
@@ -330,7 +330,7 @@ mod tests {
         let mut result = Vec::new();
         loop {
             let table = table.table.borrow();
-            match (*table)[sc as uint] {
+            match (*table)[sc as usize] {
                 EmptyCtxt => {return result;},
                 Mark(mrk,tail) => {
                     result.push(M(mrk));
@@ -398,7 +398,7 @@ mod tests {
          assert_eq! (marksof_internal (ans, stopname,&t), vec!(16));}
         // rename where stop doesn't match:
         { let chain = vec!(M(9),
-                        R(id(name1.uint() as u32,
+                        R(id(name1.usize() as u32,
                              apply_mark_internal (4, EMPTY_CTXT,&mut t)),
                           Name(100101102)),
                         M(14));
@@ -407,7 +407,7 @@ mod tests {
         // rename where stop does match
         { let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t);
          let chain = vec!(M(9),
-                       R(id(name1.uint() as u32, name1sc),
+                       R(id(name1.usize() as u32, name1sc),
                          stopname),
                        M(14));
          let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
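
All of the lookups above index one shared table with `ctxt as usize`: a syntax context is a small integer id into an append-only vector whose entries chain back to earlier contexts. A simplified sketch of that representation (entry and type names invented for illustration):

// Syntax contexts as indices into an append-only table; each entry adds a
// mark on top of a prior context, and ids are u32s cast to usize to index.
#[derive(Clone, Copy)]
enum Entry {
    Empty,
    Mark(u32, u32), // (mark, prior context id)
}

struct SctTable {
    table: Vec<Entry>,
}

impl SctTable {
    fn new() -> SctTable {
        SctTable { table: vec![Entry::Empty] } // id 0 is the empty context
    }

    fn apply_mark(&mut self, mark: u32, ctxt: u32) -> u32 {
        self.table.push(Entry::Mark(mark, ctxt));
        (self.table.len() - 1) as u32
    }

    // Walk the chain and collect marks, as marksof_internal does above.
    fn marks_of(&self, mut ctxt: u32) -> Vec<u32> {
        let mut marks = Vec::new();
        loop {
            match self.table[ctxt as usize] {
                Entry::Empty => return marks,
                Entry::Mark(m, prior) => {
                    marks.push(m);
                    ctxt = prior;
                }
            }
        }
    }
}

fn main() {
    let mut t = SctTable::new();
    let c1 = t.apply_mark(9, 0);
    let c2 = t.apply_mark(14, c1);
    assert_eq!(t.marks_of(c2), vec![14, 9]);
    println!("{:?}", t.marks_of(c2));
}
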
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index c42b188302c..5339c3d77c6 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -588,7 +588,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
         }
 
         token::Literal(token::StrRaw(ident, n), suf) => {
-            return mk_lit!("StrRaw", suf, mk_name(cx, sp, ident.ident()), cx.expr_uint(sp, n))
+            return mk_lit!("StrRaw", suf, mk_name(cx, sp, ident.ident()), cx.expr_usize(sp, n))
         }
 
         token::Ident(ident, style) => {
@@ -716,7 +716,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     // try removing it when enough of them are gone.
 
     let mut p = cx.new_parser_from_tts(tts);
-    p.quote_depth += 1u;
+    p.quote_depth += 1us;
 
     let cx_expr = p.parse_expr();
     if !p.eat(&token::Comma) {
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 31a1a838b13..a74adbf4085 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -35,7 +35,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     let topmost = cx.original_span_in_file();
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
 
-    base::MacExpr::new(cx.expr_uint(topmost, loc.line))
+    base::MacExpr::new(cx.expr_usize(topmost, loc.line))
 }
 
 /* column!(): expands to the current column number */
@@ -45,7 +45,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
     let topmost = cx.original_span_in_file();
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
-    base::MacExpr::new(cx.expr_uint(topmost, loc.col.to_uint()))
+    base::MacExpr::new(cx.expr_usize(topmost, loc.col.to_usize()))
 }
 
 /// file!(): expands to the current filename */
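
line!() and column!() above expand to plain numeric literals taken from the codemap (usize in this 2015 code; in current Rust both macros yield u32). Usage is unchanged either way:

fn main() {
    // Each call expands to a literal describing the position of the macro
    // invocation itself.
    println!("this statement lives at {}:{}", line!(), column!());
}
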
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 9eda4bcef99..d115f2ed620 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -110,14 +110,14 @@ enum TokenTreeOrTokenTreeVec {
 }
 
 impl TokenTreeOrTokenTreeVec {
-    fn len(&self) -> uint {
+    fn len(&self) -> usize {
         match self {
             &TtSeq(ref v) => v.len(),
             &Tt(ref tt) => tt.len(),
         }
     }
 
-    fn get_tt(&self, index: uint) -> TokenTree {
+    fn get_tt(&self, index: usize) -> TokenTree {
         match self {
             &TtSeq(ref v) => v[index].clone(),
             &Tt(ref tt) => tt.get_tt(index),
@@ -129,7 +129,7 @@ impl TokenTreeOrTokenTreeVec {
 #[derive(Clone)]
 struct MatcherTtFrame {
     elts: TokenTreeOrTokenTreeVec,
-    idx: uint,
+    idx: usize,
 }
 
 #[derive(Clone)]
@@ -137,16 +137,16 @@ pub struct MatcherPos {
     stack: Vec<MatcherTtFrame>,
     top_elts: TokenTreeOrTokenTreeVec,
     sep: Option<Token>,
-    idx: uint,
+    idx: usize,
     up: Option<Box<MatcherPos>>,
     matches: Vec<Vec<Rc<NamedMatch>>>,
-    match_lo: uint,
-    match_cur: uint,
-    match_hi: uint,
+    match_lo: usize,
+    match_cur: usize,
+    match_hi: usize,
     sp_lo: BytePos,
 }
 
-pub fn count_names(ms: &[TokenTree]) -> uint {
+pub fn count_names(ms: &[TokenTree]) -> usize {
     ms.iter().fold(0, |count, elt| {
         count + match elt {
             &TtSequence(_, ref seq) => {
@@ -171,11 +171,11 @@ pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: ByteP
         stack: vec![],
         top_elts: TtSeq(ms),
         sep: sep,
-        idx: 0u,
+        idx: 0us,
         up: None,
         matches: matches,
-        match_lo: 0u,
-        match_cur: 0u,
+        match_lo: 0us,
+        match_cur: 0us,
         match_hi: match_idx_hi,
         sp_lo: lo
     }
@@ -206,7 +206,7 @@ pub enum NamedMatch {
 pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
             -> HashMap<Ident, Rc<NamedMatch>> {
     fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
-             ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
+             ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) {
         match m {
             &TtSequence(_, ref seq) => {
                 for next_m in seq.tts.iter() {
@@ -238,7 +238,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
         }
     }
     let mut ret_val = HashMap::new();
-    let mut idx = 0u;
+    let mut idx = 0us;
     for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
     ret_val
 }
@@ -383,7 +383,7 @@ pub fn parse(sess: &ParseSess,
                         if seq.op == ast::ZeroOrMore {
                             let mut new_ei = ei.clone();
                             new_ei.match_cur += seq.num_captures;
-                            new_ei.idx += 1u;
+                            new_ei.idx += 1us;
                             //we specifically matched zero repeats.
                             for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) {
                                 (&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp)));
@@ -398,7 +398,7 @@ pub fn parse(sess: &ParseSess,
                         cur_eis.push(box MatcherPos {
                             stack: vec![],
                             sep: seq.separator.clone(),
-                            idx: 0u,
+                            idx: 0us,
                             matches: matches,
                             match_lo: ei_t.match_cur,
                             match_cur: ei_t.match_cur,
@@ -442,20 +442,20 @@ pub fn parse(sess: &ParseSess,
 
         /* error messages here could be improved with links to orig. rules */
         if token_name_eq(&tok, &token::Eof) {
-            if eof_eis.len() == 1u {
+            if eof_eis.len() == 1us {
                 let mut v = Vec::new();
                 for dv in (&mut eof_eis[0]).matches.iter_mut() {
                     v.push(dv.pop().unwrap());
                 }
                 return Success(nameize(sess, ms, &v[]));
-            } else if eof_eis.len() > 1u {
+            } else if eof_eis.len() > 1us {
                 return Error(sp, "ambiguity: multiple successful parses".to_string());
             } else {
                 return Failure(sp, "unexpected end of macro invocation".to_string());
             }
         } else {
-            if (bb_eis.len() > 0u && next_eis.len() > 0u)
-                || bb_eis.len() > 1u {
+            if (bb_eis.len() > 0us && next_eis.len() > 0us)
+                || bb_eis.len() > 1us {
                 let nts = bb_eis.iter().map(|ei| {
                     match ei.top_elts.get_tt(ei.idx) {
                       TtToken(_, MatchNt(bind, name, _, _)) => {
@@ -469,12 +469,12 @@ pub fn parse(sess: &ParseSess,
                     "local ambiguity: multiple parsing options: \
                      built-in NTs {} or {} other options.",
                     nts, next_eis.len()).to_string());
-            } else if bb_eis.len() == 0u && next_eis.len() == 0u {
+            } else if bb_eis.len() == 0us && next_eis.len() == 0us {
                 return Failure(sp, format!("no rules expected the token `{}`",
                             pprust::token_to_string(&tok)).to_string());
-            } else if next_eis.len() > 0u {
+            } else if next_eis.len() > 0us {
                 /* Now process the next token */
-                while next_eis.len() > 0u {
+                while next_eis.len() > 0us {
                     cur_eis.push(next_eis.pop().unwrap());
                 }
                 rdr.next_token();
@@ -488,7 +488,7 @@ pub fn parse(sess: &ParseSess,
                     let match_cur = ei.match_cur;
                     (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
                         parse_nt(&mut rust_parser, name_string.get()))));
-                    ei.idx += 1u;
+                    ei.idx += 1us;
                     ei.match_cur += 1;
                   }
                   _ => panic!()
@@ -501,16 +501,16 @@ pub fn parse(sess: &ParseSess,
             }
         }
 
-        assert!(cur_eis.len() > 0u);
+        assert!(cur_eis.len() > 0us);
     }
 }
 
 pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
     match name {
         "tt" => {
-            p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
+            p.quote_depth += 1us; //but in theory, non-quoted tts might be useful
             let res = token::NtTT(P(p.parse_token_tree()));
-            p.quote_depth -= 1u;
+            p.quote_depth -= 1us;
             return res;
         }
         _ => {}
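
count_names above folds over a matcher and counts every `$name:kind` binder, including binders nested under repetitions. The same fold over a toy matcher type (the type below is invented purely for illustration):

// Toy matcher shape: plain tokens, `$name:kind` binders, and repetitions
// carrying nested sub-matchers, mirroring TtToken/MatchNt/TtSequence.
enum Matcher {
    Token,
    Binder(&'static str),
    Sequence(Vec<Matcher>),
}

// Count binders recursively, the way count_names sums seq.num_captures.
fn count_names(ms: &[Matcher]) -> usize {
    ms.iter().fold(0, |count, m| {
        count + match m {
            Matcher::Token => 0,
            Matcher::Binder(_) => 1,
            Matcher::Sequence(inner) => count_names(inner),
        }
    })
}

fn main() {
    let matcher = vec![
        Matcher::Binder("x"),
        Matcher::Token,
        Matcher::Sequence(vec![Matcher::Binder("y"), Matcher::Token]),
    ];
    assert_eq!(count_names(&matcher), 2);
    println!("{}", count_names(&matcher));
}
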
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 94b8356130a..0bf20b8f3e1 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -27,7 +27,7 @@ use std::collections::HashMap;
 #[derive(Clone)]
 struct TtFrame {
     forest: TokenTree,
-    idx: uint,
+    idx: usize,
     dotdotdoted: bool,
     sep: Option<Token>,
 }
@@ -43,8 +43,8 @@ pub struct TtReader<'a> {
 
     // Some => return imported_from as the next token
     crate_name_next: Option<Span>,
-    repeat_idx: Vec<uint>,
-    repeat_len: Vec<uint>,
+    repeat_idx: Vec<usize>,
+    repeat_len: Vec<usize>,
     /* cached: */
     pub cur_tok: Token,
     pub cur_span: Span,
@@ -124,7 +124,7 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
 #[derive(Clone)]
 enum LockstepIterSize {
     LisUnconstrained,
-    LisConstraint(uint, Ident),
+    LisConstraint(usize, Ident),
     LisContradiction(String),
 }
 
@@ -223,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 r.repeat_len.pop();
             }
         } else { /* repeat */
-            *r.repeat_idx.last_mut().unwrap() += 1u;
+            *r.repeat_idx.last_mut().unwrap() += 1us;
             r.stack.last_mut().unwrap().idx = 0;
             match r.stack.last().unwrap().sep.clone() {
                 Some(tk) => {
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index f484650ad5b..a876b378060 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -174,8 +174,8 @@ pub trait Folder : Sized {
         noop_fold_ident(i, self)
     }
 
-    fn fold_uint(&mut self, i: uint) -> uint {
-        noop_fold_uint(i, self)
+    fn fold_usize(&mut self, i: usize) -> usize {
+        noop_fold_usize(i, self)
     }
 
     fn fold_path(&mut self, p: Path) -> Path {
@@ -505,7 +505,7 @@ pub fn noop_fold_ident<T: Folder>(i: Ident, _: &mut T) -> Ident {
     i
 }
 
-pub fn noop_fold_uint<T: Folder>(i: uint, _: &mut T) -> uint {
+pub fn noop_fold_usize<T: Folder>(i: usize, _: &mut T) -> usize {
     i
 }
 
@@ -1371,7 +1371,7 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span}: Expr, folder: &mut T) ->
             }
             ExprTupField(el, ident) => {
                 ExprTupField(folder.fold_expr(el),
-                             respan(ident.span, folder.fold_uint(ident.node)))
+                             respan(ident.span, folder.fold_usize(ident.node)))
             }
             ExprIndex(el, er) => {
                 ExprIndex(folder.fold_expr(el), folder.fold_expr(er))
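
The Folder change above only renames the numeric hook, but the surrounding pattern is worth spelling out: every method has a no-op default (like noop_fold_usize), so an implementor overrides just the nodes it rewrites. A reduced sketch of that pattern with invented types:

// A cut-down "folder": every method has a no-op default, and an
// implementation overrides only the nodes it wants to rewrite.
#[derive(Debug, PartialEq)]
struct Field { index: usize, name: String }

trait Folder: Sized {
    fn fold_usize(&mut self, i: usize) -> usize {
        i // no-op default, like noop_fold_usize
    }
    fn fold_name(&mut self, n: String) -> String {
        n // no-op default
    }
    fn fold_field(&mut self, f: Field) -> Field {
        Field {
            index: self.fold_usize(f.index),
            name: self.fold_name(f.name),
        }
    }
}

// Only the numeric hook is overridden; names pass through untouched.
struct ShiftIndices;
impl Folder for ShiftIndices {
    fn fold_usize(&mut self, i: usize) -> usize { i + 1 }
}

fn main() {
    let folded = ShiftIndices.fold_field(Field { index: 0, name: "el".into() });
    assert_eq!(folded, Field { index: 1, name: "el".to_string() });
    println!("{:?}", folded);
}
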
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 16ade904be8..2799696e8eb 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -22,7 +22,7 @@ use print::pprust;
 use std::io;
 use std::str;
 use std::string::String;
-use std::uint;
+use std::usize;
 
 #[derive(Clone, Copy, PartialEq)]
 pub enum CommentStyle {
@@ -62,7 +62,7 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
 pub fn strip_doc_comment_decoration(comment: &str) -> String {
     /// remove whitespace-only lines from the start/end of lines
     fn vertical_trim(lines: Vec<String> ) -> Vec<String> {
-        let mut i = 0u;
+        let mut i = 0us;
         let mut j = lines.len();
         // first line of all-stars should be omitted
         if lines.len() > 0 &&
@@ -87,7 +87,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
 
     /// remove a "[ \t]*\*" block from each line, if possible
     fn horizontal_trim(lines: Vec<String> ) -> Vec<String> {
-        let mut i = uint::MAX;
+        let mut i = usize::MAX;
         let mut can_trim = true;
         let mut first = true;
         for line in lines.iter() {
@@ -132,7 +132,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
     }
 
     if comment.starts_with("/*") {
-        let lines = comment[3u..(comment.len() - 2u)]
+        let lines = comment[3us..(comment.len() - 2us)]
             .lines_any()
             .map(|s| s.to_string())
             .collect::<Vec<String> >();
@@ -158,7 +158,7 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
 fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader,
                                            comments: &mut Vec<Comment>) {
     while is_whitespace(rdr.curr) && !rdr.is_eof() {
-        if rdr.col == CharPos(0u) && rdr.curr_is('\n') {
+        if rdr.col == CharPos(0us) && rdr.curr_is('\n') {
             push_blank_line_comment(rdr, &mut *comments);
         }
         rdr.bump();
@@ -206,10 +206,10 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool,
 /// Returns None if the first col chars of s contain a non-whitespace char.
 /// Otherwise returns Some(k) where k is first char offset after that leading
 /// whitespace.  Note k may be outside bounds of s.
-fn all_whitespace(s: &str, col: CharPos) -> Option<uint> {
+fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
     let len = s.len();
-    let mut col = col.to_uint();
-    let mut cursor: uint = 0;
+    let mut col = col.to_usize();
+    let mut cursor: usize = 0;
     while col > 0 && cursor < len {
         let r: str::CharRange = s.char_range_at(cursor);
         if !r.ch.is_whitespace() {
@@ -267,7 +267,7 @@ fn read_block_comment(rdr: &mut StringReader,
         assert!(!curr_line.contains_char('\n'));
         lines.push(curr_line);
     } else {
-        let mut level: int = 1;
+        let mut level: isize = 1;
         while level > 0 {
             debug!("=== block comment level {}", level);
             if rdr.is_eof() {
@@ -305,7 +305,7 @@ fn read_block_comment(rdr: &mut StringReader,
 
     let mut style = if code_to_the_left { Trailing } else { Isolated };
     rdr.consume_non_eol_whitespace();
-    if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1u {
+    if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1us {
         style = Mixed;
     }
     debug!("<<< block comment");
@@ -399,9 +399,9 @@ mod test {
     }
 
     #[test] fn test_block_doc_comment_3() {
-        let comment = "/**\n let a: *int;\n *a = 5;\n*/";
+        let comment = "/**\n let a: *i32;\n *a = 5;\n*/";
         let stripped = strip_doc_comment_decoration(comment);
-        assert_eq!(stripped, " let a: *int;\n *a = 5;");
+        assert_eq!(stripped, " let a: *i32;\n *a = 5;");
     }
 
     #[test] fn test_block_doc_comment_4() {
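
strip_doc_comment_decoration above removes a leading `[ \t]*\*` gutter from the lines of a /** ... */ comment, but only when every line carries one. A much-simplified sketch of that horizontal-trim idea (the real function is more careful about what may precede the `*`):

// Strip a common " * " style gutter from block doc comment lines, but only
// if every line has a '*' at the same offset; otherwise leave them as-is.
fn horizontal_trim(lines: Vec<String>) -> Vec<String> {
    let cuts = lines
        .iter()
        .map(|l| l.find('*').map(|i| i + 1))
        .collect::<Option<Vec<usize>>>();

    match cuts {
        Some(cuts) if cuts.iter().all(|&c| c == cuts[0]) => lines
            .iter()
            .zip(cuts)
            .map(|(l, c)| l[c..].to_string())
            .collect(),
        _ => lines, // no uniform gutter: keep the lines untouched
    }
}

fn main() {
    let lines = vec![" * one".to_string(), " * two".to_string()];
    assert_eq!(horizontal_trim(lines), vec![" one", " two"]);
    println!("ok");
}
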
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 4cdafb36eec..d18bf554975 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -212,8 +212,8 @@ impl<'a> StringReader<'a> {
     /// offending string to the error message
     fn fatal_span_verbose(&self, from_pos: BytePos, to_pos: BytePos, mut m: String) -> ! {
         m.push_str(": ");
-        let from = self.byte_offset(from_pos).to_uint();
-        let to = self.byte_offset(to_pos).to_uint();
+        let from = self.byte_offset(from_pos).to_usize();
+        let to = self.byte_offset(to_pos).to_usize();
         m.push_str(&self.filemap.src[from..to]);
         self.fatal_span_(from_pos, to_pos, &m[]);
     }
@@ -272,14 +272,14 @@ impl<'a> StringReader<'a> {
         F: FnOnce(&str) -> T,
     {
         f(self.filemap.src.slice(
-                self.byte_offset(start).to_uint(),
-                self.byte_offset(end).to_uint()))
+                self.byte_offset(start).to_usize(),
+                self.byte_offset(end).to_usize()))
     }
 
     /// Converts CRLF to LF in the given string, raising an error on bare CR.
     fn translate_crlf<'b>(&self, start: BytePos,
                           s: &'b str, errmsg: &'b str) -> CowString<'b> {
-        let mut i = 0u;
+        let mut i = 0us;
         while i < s.len() {
             let str::CharRange { ch, next } = s.char_range_at(i);
             if ch == '\r' {
@@ -295,7 +295,7 @@ impl<'a> StringReader<'a> {
         return s.into_cow();
 
         fn translate_crlf_(rdr: &StringReader, start: BytePos,
-                        s: &str, errmsg: &str, mut i: uint) -> String {
+                        s: &str, errmsg: &str, mut i: usize) -> String {
             let mut buf = String::with_capacity(s.len());
             let mut j = 0;
             while i < s.len() {
@@ -321,7 +321,7 @@ impl<'a> StringReader<'a> {
     /// discovered, add it to the FileMap's list of line start offsets.
     pub fn bump(&mut self) {
         self.last_pos = self.pos;
-        let current_byte_offset = self.byte_offset(self.pos).to_uint();
+        let current_byte_offset = self.byte_offset(self.pos).to_usize();
         if current_byte_offset < self.filemap.src.len() {
             assert!(self.curr.is_some());
             let last_char = self.curr.unwrap();
@@ -329,12 +329,12 @@ impl<'a> StringReader<'a> {
                           .src
                           .char_range_at(current_byte_offset);
             let byte_offset_diff = next.next - current_byte_offset;
-            self.pos = self.pos + Pos::from_uint(byte_offset_diff);
+            self.pos = self.pos + Pos::from_usize(byte_offset_diff);
             self.curr = Some(next.ch);
-            self.col = self.col + CharPos(1u);
+            self.col = self.col + CharPos(1us);
             if last_char == '\n' {
                 self.filemap.next_line(self.last_pos);
-                self.col = CharPos(0u);
+                self.col = CharPos(0us);
             }
 
             if byte_offset_diff > 1 {
@@ -346,7 +346,7 @@ impl<'a> StringReader<'a> {
     }
 
     pub fn nextch(&self) -> Option<char> {
-        let offset = self.byte_offset(self.pos).to_uint();
+        let offset = self.byte_offset(self.pos).to_usize();
         if offset < self.filemap.src.len() {
             Some(self.filemap.src.char_at(offset))
         } else {
@@ -359,7 +359,7 @@ impl<'a> StringReader<'a> {
     }
 
     pub fn nextnextch(&self) -> Option<char> {
-        let offset = self.byte_offset(self.pos).to_uint();
+        let offset = self.byte_offset(self.pos).to_usize();
         let s = self.filemap.src.as_slice();
         if offset >= s.len() { return None }
         let str::CharRange { next, .. } = s.char_range_at(offset);
@@ -472,7 +472,7 @@ impl<'a> StringReader<'a> {
                 cmap.files.borrow_mut().push(self.filemap.clone());
                 let loc = cmap.lookup_char_pos_adj(self.last_pos);
                 debug!("Skipping a shebang");
-                if loc.line == 1u && loc.col == CharPos(0u) {
+                if loc.line == 1us && loc.col == CharPos(0us) {
                     // FIXME: Add shebang "token", return it
                     let start = self.last_pos;
                     while !self.curr_is('\n') && !self.is_eof() { self.bump(); }
@@ -519,7 +519,7 @@ impl<'a> StringReader<'a> {
         let is_doc_comment = self.curr_is('*') || self.curr_is('!');
         let start_bpos = self.last_pos - BytePos(2);
 
-        let mut level: int = 1;
+        let mut level: isize = 1;
         let mut has_cr = false;
         while level > 0 {
             if self.is_eof() {
@@ -645,8 +645,8 @@ impl<'a> StringReader<'a> {
 
     /// Scan through any digits (base `radix`) or underscores, and return how
     /// many digits there were.
-    fn scan_digits(&mut self, radix: uint) -> uint {
-        let mut len = 0u;
+    fn scan_digits(&mut self, radix: usize) -> usize {
+        let mut len = 0us;
         loop {
             let c = self.curr;
             if c == Some('_') { debug!("skipping a _"); self.bump(); continue; }
@@ -724,7 +724,7 @@ impl<'a> StringReader<'a> {
     /// Scan over `n_digits` hex digits, stopping at `delim`, reporting an
     /// error if too many or too few digits are encountered.
     fn scan_hex_digits(&mut self,
-                       n_digits: uint,
+                       n_digits: usize,
                        delim: char,
                        below_0x7f_only: bool)
                        -> bool {
@@ -799,14 +799,14 @@ impl<'a> StringReader<'a> {
                                 if self.curr == Some('{') {
                                     self.scan_unicode_escape(delim)
                                 } else {
-                                    let res = self.scan_hex_digits(4u, delim, false);
+                                    let res = self.scan_hex_digits(4us, delim, false);
                                     let sp = codemap::mk_sp(escaped_pos, self.last_pos);
                                     self.old_escape_warning(sp);
                                     res
                                 }
                             }
                             'U' if !ascii_only => {
-                                let res = self.scan_hex_digits(8u, delim, false);
+                                let res = self.scan_hex_digits(8us, delim, false);
                                 let sp = codemap::mk_sp(escaped_pos, self.last_pos);
                                 self.old_escape_warning(sp);
                                 res
@@ -877,7 +877,7 @@ impl<'a> StringReader<'a> {
     fn scan_unicode_escape(&mut self, delim: char) -> bool {
         self.bump(); // past the {
         let start_bpos = self.last_pos;
-        let mut count: uint = 0;
+        let mut count = 0us;
         let mut accum_int = 0;
 
         while !self.curr_is('}') && count <= 6 {
@@ -935,13 +935,13 @@ impl<'a> StringReader<'a> {
 
     /// Check that a base is valid for a floating literal, emitting a nice
     /// error if it isn't.
-    fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: uint) {
+    fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
         match base {
-            16u => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
-                                 supported"),
-            8u => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"),
-            2u => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"),
-            _ => ()
+            16us => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
+                                   supported"),
+            8us => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"),
+            2us => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"),
+            _   => ()
         }
     }
 
@@ -1189,7 +1189,7 @@ impl<'a> StringReader<'a> {
           'r' => {
             let start_bpos = self.last_pos;
             self.bump();
-            let mut hash_count = 0u;
+            let mut hash_count = 0us;
             while self.curr_is('#') {
                 self.bump();
                 hash_count += 1;
@@ -1374,7 +1374,7 @@ impl<'a> StringReader<'a> {
     fn scan_raw_byte_string(&mut self) -> token::Lit {
         let start_bpos = self.last_pos;
         self.bump();
-        let mut hash_count = 0u;
+        let mut hash_count = 0us;
         while self.curr_is('#') {
             self.bump();
             hash_count += 1;
@@ -1616,9 +1616,9 @@ mod test {
         test!("1.0", Float, "1.0");
         test!("1.0e10", Float, "1.0e10");
 
-        assert_eq!(setup(&mk_sh(), "2u".to_string()).next_token().tok,
+        assert_eq!(setup(&mk_sh(), "2us".to_string()).next_token().tok,
                    token::Literal(token::Integer(token::intern("2")),
-                                  Some(token::intern("u"))));
+                                  Some(token::intern("us"))));
         assert_eq!(setup(&mk_sh(), "r###\"raw\"###suffix".to_string()).next_token().tok,
                    token::Literal(token::StrRaw(token::intern("raw"), 3),
                                   Some(token::intern("suffix"))));
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index dd376fe9e10..28adba7eee7 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -181,7 +181,7 @@ pub fn parse_tts_from_source_str(name: String,
         name,
         source
     );
-    p.quote_depth += 1u;
+    p.quote_depth += 1us;
     // right now this is re-creating the token trees from ... token trees.
     maybe_aborted(p.parse_all_token_trees(),p)
 }
@@ -324,7 +324,7 @@ pub mod with_hygiene {
             name,
             source
         );
-        p.quote_depth += 1u;
+        p.quote_depth += 1us;
         // right now this is re-creating the token trees from ... token trees.
         maybe_aborted(p.parse_all_token_trees(),p)
     }
@@ -373,7 +373,7 @@ pub fn maybe_aborted<T>(result: T, mut p: Parser) -> T {
 /// Rather than just accepting/rejecting a given literal, unescapes it as
 /// well. Can take any slice prefixed by a character escape. Returns the
 /// character and the number of characters consumed.
-pub fn char_lit(lit: &str) -> (char, int) {
+pub fn char_lit(lit: &str) -> (char, isize) {
     use std::{num, char};
 
     let mut chars = lit.chars();
@@ -400,19 +400,19 @@ pub fn char_lit(lit: &str) -> (char, int) {
     let msg = format!("lexer should have rejected a bad character escape {}", lit);
     let msg2 = &msg[];
 
-    fn esc(len: uint, lit: &str) -> Option<(char, int)> {
+    fn esc(len: usize, lit: &str) -> Option<(char, isize)> {
         num::from_str_radix(&lit[2..len], 16)
         .and_then(char::from_u32)
-        .map(|x| (x, len as int))
+        .map(|x| (x, len as isize))
     }
 
-    let unicode_escape = |&: | -> Option<(char, int)>
+    let unicode_escape = |&: | -> Option<(char, isize)>
         if lit.as_bytes()[2] == b'{' {
             let idx = lit.find('}').expect(msg2);
             let subslice = &lit[3..idx];
             num::from_str_radix(subslice, 16)
                 .and_then(char::from_u32)
-                .map(|x| (x, subslice.chars().count() as int + 4))
+                .map(|x| (x, subslice.chars().count() as isize + 4))
         } else {
             esc(6, lit)
         };
@@ -436,7 +436,7 @@ pub fn str_lit(lit: &str) -> String {
     let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
 
     /// Eat everything up to a non-whitespace
-    fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) {
+    fn eat<'a>(it: &mut iter::Peekable<(usize, char), str::CharIndices<'a>>) {
         loop {
             match it.peek().map(|x| x.1) {
                 Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
@@ -567,13 +567,13 @@ pub fn float_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> a
 }
 
 /// Parse a string representing a byte literal into its final form. Similar to `char_lit`
-pub fn byte_lit(lit: &str) -> (u8, uint) {
+pub fn byte_lit(lit: &str) -> (u8, usize) {
     let err = |&: i| format!("lexer accepted invalid byte literal {} step {}", lit, i);
 
     if lit.len() == 1 {
         (lit.as_bytes()[0], 1)
     } else {
-        assert!(lit.as_bytes()[0] == b'\\', err(0i));
+        assert!(lit.as_bytes()[0] == b'\\', err(0is));
         let b = match lit.as_bytes()[1] {
             b'"' => b'"',
             b'n' => b'\n',
@@ -605,7 +605,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
     let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
 
     /// Eat everything up to a non-whitespace
-    fn eat<'a, I: Iterator<Item=(uint, u8)>>(it: &mut iter::Peekable<(uint, u8), I>) {
+    fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<(usize, u8), I>) {
         loop {
             match it.peek().map(|x| x.1) {
                 Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
@@ -683,9 +683,9 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
     match suffix {
         Some(suf) if looks_like_width_suffix(&['f'], suf) => {
             match base {
-                16u => sd.span_err(sp, "hexadecimal float literal is not supported"),
-                8u => sd.span_err(sp, "octal float literal is not supported"),
-                2u => sd.span_err(sp, "binary float literal is not supported"),
+                16us => sd.span_err(sp, "hexadecimal float literal is not supported"),
+                8us => sd.span_err(sp, "octal float literal is not supported"),
+                2us => sd.span_err(sp, "binary float literal is not supported"),
                 _ => ()
             }
             let ident = token::intern_and_get_ident(&*s);
@@ -854,7 +854,7 @@ mod test {
 
     #[test]
     fn string_to_tts_1 () {
-        let tts = string_to_tts("fn a (b : int) { b; }".to_string());
+        let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
         assert_eq!(json::encode(&tts),
         "[\
     {\
@@ -918,7 +918,7 @@ mod test {
                             {\
                                 \"variant\":\"Ident\",\
                                 \"fields\":[\
-                                    \"int\",\
+                                    \"i32\",\
                                     \"Plain\"\
                                 ]\
                             }\
@@ -1030,8 +1030,8 @@ mod test {
 
     // check the contents of the tt manually:
     #[test] fn parse_fundecl () {
-        // this test depends on the intern order of "fn" and "int"
-        assert!(string_to_item("fn a (b : int) { b; }".to_string()) ==
+        // this test depends on the intern order of "fn" and "i32"
+        assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()),
                   Some(
                       P(ast::Item{ident:str_to_ident("a"),
                             attrs:Vec::new(),
@@ -1045,7 +1045,7 @@ mod test {
                                         segments: vec!(
                                             ast::PathSegment {
                                                 identifier:
-                                                    str_to_ident("int"),
+                                                    str_to_ident("i32"),
                                                 parameters: ast::PathParameters::none(),
                                             }
                                         ),
@@ -1158,19 +1158,19 @@ mod test {
 
     #[test] fn span_of_self_arg_pat_idents_are_correct() {
 
-        let srcs = ["impl z { fn a (&self, &myarg: int) {} }",
-                    "impl z { fn a (&mut self, &myarg: int) {} }",
-                    "impl z { fn a (&'a self, &myarg: int) {} }",
-                    "impl z { fn a (self, &myarg: int) {} }",
-                    "impl z { fn a (self: Foo, &myarg: int) {} }",
+        let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
+                    "impl z { fn a (&mut self, &myarg: i32) {} }",
+                    "impl z { fn a (&'a self, &myarg: i32) {} }",
+                    "impl z { fn a (self, &myarg: i32) {} }",
+                    "impl z { fn a (self: Foo, &myarg: i32) {} }",
                     ];
 
         for &src in srcs.iter() {
             let spans = get_spans_of_pat_idents(src);
             let Span{ lo, hi, .. } = spans[0];
-            assert!("self" == &src[lo.to_uint()..hi.to_uint()],
+            assert!("self" == &src[lo.to_usize()..hi.to_usize()],
                     "\"{}\" != \"self\". src=\"{}\"",
-                    &src[lo.to_uint()..hi.to_uint()], src)
+                    &src[lo.to_usize()..hi.to_usize()], src)
         }
     }
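
char_lit above unescapes the text of a character literal and reports how many input characters were consumed (now as isize). A reduced sketch covering only the single-character escapes, with the count kept as usize:

// Unescape the body of a char literal, returning the character and the
// number of input chars consumed; only \n-style escapes are handled here.
fn simple_char_lit(lit: &str) -> Option<(char, usize)> {
    let mut chars = lit.chars();
    match (chars.next(), chars.next()) {
        (Some(c), None) if c != '\\' => Some((c, 1)),
        (Some('\\'), Some(e)) => {
            let c = match e {
                'n' => '\n',
                't' => '\t',
                'r' => '\r',
                '\\' => '\\',
                '\'' => '\'',
                '0' => '\0',
                _ => return None, // \x.. and \u{..} forms omitted in this sketch
            };
            Some((c, 2))
        }
        _ => None,
    }
}

fn main() {
    assert_eq!(simple_char_lit("a"), Some(('a', 1)));
    assert_eq!(simple_char_lit("\\n"), Some(('\n', 2)));
    assert_eq!(simple_char_lit("\\x7f"), None); // not handled by the sketch
    println!("ok");
}
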
 
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index 9d03ec73af8..a3600506057 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -62,7 +62,7 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
                 "use a `move ||` expression instead",
             ),
             ObsoleteSyntax::ClosureType => (
-                "`|uint| -> bool` closure type syntax",
+                "`|usize| -> bool` closure type syntax",
                 "use unboxed closures instead, no type annotation needed"
             ),
             ObsoleteSyntax::Sized => (
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 83a7504bc49..1a296d39360 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -291,11 +291,11 @@ pub struct Parser<'a> {
     /// the previous token or None (only stashed sometimes).
     pub last_token: Option<Box<token::Token>>,
     pub buffer: [TokenAndSpan; 4],
-    pub buffer_start: int,
-    pub buffer_end: int,
-    pub tokens_consumed: uint,
+    pub buffer_start: isize,
+    pub buffer_end: isize,
+    pub tokens_consumed: usize,
     pub restrictions: Restrictions,
-    pub quote_depth: uint, // not (yet) related to the quasiquoter
+    pub quote_depth: usize, // not (yet) related to the quasiquoter
     pub reader: Box<Reader+'a>,
     pub interner: Rc<token::IdentInterner>,
     /// The set of seen errors about obsolete syntax. Used to suppress
@@ -768,7 +768,7 @@ impl<'a> Parser<'a> {
         // would encounter a `>` and stop. This lets the parser handle trailing
         // commas in generic parameters, because it can stop either after
         // parsing a type or after parsing a comma.
-        for i in iter::count(0u, 1) {
+        for i in iter::count(0us, 1) {
             if self.check(&token::Gt)
                 || self.token == token::BinOp(token::Shr)
                 || self.token == token::Ge
@@ -933,9 +933,9 @@ impl<'a> Parser<'a> {
             self.reader.real_token()
         } else {
             // Avoid token copies with `replace`.
-            let buffer_start = self.buffer_start as uint;
-            let next_index = (buffer_start + 1) & 3 as uint;
-            self.buffer_start = next_index as int;
+            let buffer_start = self.buffer_start as usize;
+            let next_index = (buffer_start + 1) & 3 as usize;
+            self.buffer_start = next_index as isize;
 
             let placeholder = TokenAndSpan {
                 tok: token::Underscore,
@@ -945,7 +945,7 @@ impl<'a> Parser<'a> {
         };
         self.span = next.sp;
         self.token = next.tok;
-        self.tokens_consumed += 1u;
+        self.tokens_consumed += 1us;
         self.expected_tokens.clear();
         // check after each token
         self.check_unknown_macro_variable();
@@ -967,21 +967,21 @@ impl<'a> Parser<'a> {
         self.token = next;
         self.span = mk_sp(lo, hi);
     }
-    pub fn buffer_length(&mut self) -> int {
+    pub fn buffer_length(&mut self) -> isize {
         if self.buffer_start <= self.buffer_end {
             return self.buffer_end - self.buffer_start;
         }
         return (4 - self.buffer_start) + self.buffer_end;
     }
-    pub fn look_ahead<R, F>(&mut self, distance: uint, f: F) -> R where
+    pub fn look_ahead<R, F>(&mut self, distance: usize, f: F) -> R where
         F: FnOnce(&token::Token) -> R,
     {
-        let dist = distance as int;
+        let dist = distance as isize;
         while self.buffer_length() < dist {
-            self.buffer[self.buffer_end as uint] = self.reader.real_token();
+            self.buffer[self.buffer_end as usize] = self.reader.real_token();
             self.buffer_end = (self.buffer_end + 1) & 3;
         }
-        f(&self.buffer[((self.buffer_start + dist - 1) & 3) as uint].tok)
+        f(&self.buffer[((self.buffer_start + dist - 1) & 3) as usize].tok)
     }
     pub fn fatal(&mut self, m: &str) -> ! {
         self.sess.span_diagnostic.span_fatal(self.span, m)
@@ -1496,7 +1496,7 @@ impl<'a> Parser<'a> {
             self.expect(&token::OpenDelim(token::Bracket));
             let t = self.parse_ty_sum();
 
-            // Parse the `; e` in `[ int; e ]`
+            // Parse the `; e` in `[ i32; e ]`
             // where `e` is a const expression
             let t = match self.maybe_parse_fixed_length_of_vec() {
                 None => TyVec(t),
@@ -2084,7 +2084,7 @@ impl<'a> Parser<'a> {
         ExprField(expr, ident)
     }
 
-    pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<uint>) -> ast::Expr_ {
+    pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<usize>) -> ast::Expr_ {
         ExprTupField(expr, idx)
     }
 
@@ -2483,7 +2483,7 @@ impl<'a> Parser<'a> {
                     hi = self.span.hi;
                     self.bump();
 
-                    let index = n.as_str().parse::<uint>();
+                    let index = n.as_str().parse::<usize>();
                     match index {
                         Some(n) => {
                             let id = spanned(dot, hi, n);
@@ -2509,7 +2509,7 @@ impl<'a> Parser<'a> {
                         };
                         self.span_help(last_span,
                             &format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
-                                    float.trunc() as uint,
+                                    float.trunc() as usize,
                                     &float.fract().to_string()[1..])[]);
                     }
                     self.abort_if_errors();
@@ -2636,7 +2636,7 @@ impl<'a> Parser<'a> {
     }
 
     pub fn check_unknown_macro_variable(&mut self) {
-        if self.quote_depth == 0u {
+        if self.quote_depth == 0us {
             match self.token {
                 token::SubstNt(name, _) =>
                     self.fatal(&format!("unknown macro variable `{}`",
@@ -2705,7 +2705,7 @@ impl<'a> Parser<'a> {
                                     token_str)[])
                 },
                 /* we ought to allow different depths of unquotation */
-                token::Dollar | token::SubstNt(..) if p.quote_depth > 0u => {
+                token::Dollar | token::SubstNt(..) if p.quote_depth > 0us => {
                     p.parse_unquoted()
                 }
                 _ => {
@@ -2863,7 +2863,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse an expression of binops of at least min_prec precedence
-    pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: uint) -> P<Expr> {
+    pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: usize) -> P<Expr> {
         if self.expr_is_complete(&*lhs) { return lhs; }
 
         // Prevent dynamic borrow errors later on by limiting the
@@ -4795,7 +4795,7 @@ impl<'a> Parser<'a> {
          Some(attrs))
     }
 
-    /// Parse a::B<String,int>
+    /// Parse a::B<String,i32>
     fn parse_trait_ref(&mut self) -> TraitRef {
         ast::TraitRef {
             path: self.parse_path(LifetimeAndTypesWithoutColons),
@@ -4814,7 +4814,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Parse for<'l> a::B<String,int>
+    /// Parse for<'l> a::B<String,i32>
     fn parse_poly_trait_ref(&mut self) -> PolyTraitRef {
         let lifetime_defs = self.parse_late_bound_lifetime_defs();
 
@@ -5071,7 +5071,7 @@ impl<'a> Parser<'a> {
             }
         }
 
-        if first && attrs_remaining_len > 0u {
+        if first && attrs_remaining_len > 0us {
             // We parsed attributes for the first item but didn't find it
             let last_span = self.last_span;
             self.span_err(last_span,
@@ -5668,7 +5668,7 @@ impl<'a> Parser<'a> {
             return IoviItem(item);
         }
         if self.token.is_keyword(keywords::Unsafe) &&
-            self.look_ahead(1u, |t| t.is_keyword(keywords::Trait))
+            self.look_ahead(1us, |t| t.is_keyword(keywords::Trait))
         {
             // UNSAFE TRAIT ITEM
             self.expect_keyword(keywords::Unsafe);
@@ -5685,7 +5685,7 @@ impl<'a> Parser<'a> {
             return IoviItem(item);
         }
         if self.token.is_keyword(keywords::Unsafe) &&
-            self.look_ahead(1u, |t| t.is_keyword(keywords::Impl))
+            self.look_ahead(1us, |t| t.is_keyword(keywords::Impl))
         {
             // IMPL ITEM
             self.expect_keyword(keywords::Unsafe);
@@ -5715,7 +5715,7 @@ impl<'a> Parser<'a> {
             return IoviItem(item);
         }
         if self.token.is_keyword(keywords::Unsafe)
-            && self.look_ahead(1u, |t| *t != token::OpenDelim(token::Brace)) {
+            && self.look_ahead(1us, |t| *t != token::OpenDelim(token::Brace)) {
             // UNSAFE FUNCTION ITEM
             self.bump();
             let abi = if self.eat_keyword(keywords::Extern) {
@@ -6019,7 +6019,7 @@ impl<'a> Parser<'a> {
                 }
             }
         }
-        let mut rename_to = path[path.len() - 1u];
+        let mut rename_to = path[path.len() - 1us];
         let path = ast::Path {
             span: mk_sp(lo, self.last_span.hi),
             global: false,
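
look_ahead above keeps at most four buffered tokens in a fixed array and wraps both cursors with `& 3` (the cursors stay isize in the parser, with buffer_length handling the wrapped case). A tiny standalone sketch of such a 4-slot lookahead ring over a generic element, using usize cursors for brevity:

// A 4-slot ring buffer for lookahead: indices wrap with `& 3`,
// mirroring Parser::look_ahead / buffer_length above.
struct Ring<T> {
    buf: [Option<T>; 4],
    start: usize,
    end: usize,
}

impl<T: Clone> Ring<T> {
    fn new() -> Ring<T> {
        Ring { buf: [None, None, None, None], start: 0, end: 0 }
    }

    fn len(&self) -> usize {
        if self.start <= self.end {
            self.end - self.start
        } else {
            (4 - self.start) + self.end
        }
    }

    // Fill from `source` until `distance` items are buffered, then peek.
    // `distance` must be in 1..=4, matching the parser's 4-token buffer.
    fn look_ahead<F: FnMut() -> T>(&mut self, distance: usize, mut source: F) -> T {
        while self.len() < distance {
            self.buf[self.end] = Some(source());
            self.end = (self.end + 1) & 3;
        }
        self.buf[(self.start + distance - 1) & 3].clone().unwrap()
    }
}

fn main() {
    let mut next = 0;
    let mut ring = Ring::new();
    let third = ring.look_ahead(3, || { next += 1; next });
    assert_eq!(third, 3);
    println!("{}", third);
}
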
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index e5aef12e827..e3762bb011c 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -83,9 +83,9 @@ pub enum Lit {
     Integer(ast::Name),
     Float(ast::Name),
     Str_(ast::Name),
-    StrRaw(ast::Name, uint), /* raw str delimited by n hash symbols */
+    StrRaw(ast::Name, usize), /* raw str delimited by n hash symbols */
     Binary(ast::Name),
-    BinaryRaw(ast::Name, uint), /* raw binary str delimited by n hash symbols */
+    BinaryRaw(ast::Name, usize), /* raw binary str delimited by n hash symbols */
 }
 
 impl Lit {
@@ -724,7 +724,7 @@ pub fn intern(s: &str) -> ast::Name {
     get_ident_interner().intern(s)
 }
 
-/// gensym's a new uint, using the current interner.
+/// gensym's a new usize, using the current interner.
 #[inline]
 pub fn gensym(s: &str) -> ast::Name {
     get_ident_interner().gensym(s)
@@ -757,7 +757,7 @@ pub fn fresh_name(src: &ast::Ident) -> ast::Name {
 
 // create a fresh mark.
 pub fn fresh_mark() -> ast::Mrk {
-    gensym("mark").uint() as u32
+    gensym("mark").usize() as u32
 }
 
 #[cfg(test)]
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 06d510d37bd..0b1bd282941 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -31,7 +31,7 @@
 //!
 //! In particular you'll see a certain amount of churn related to INTEGER vs.
 //! CARDINAL in the Mesa implementation. Mesa apparently interconverts the two
-//! somewhat readily? In any case, I've used uint for indices-in-buffers and
+//! somewhat readily? In any case, I've used usize for indices-in-buffers and
 //! ints for character-sizes-and-indentation-offsets. This respects the need
 //! for ints to "go negative" while carrying a pending-calculation balance, and
 //! helps differentiate all the numbers flying around internally (slightly).
@@ -71,19 +71,19 @@ pub enum Breaks {
 
 #[derive(Clone, Copy)]
 pub struct BreakToken {
-    offset: int,
-    blank_space: int
+    offset: isize,
+    blank_space: isize
 }
 
 #[derive(Clone, Copy)]
 pub struct BeginToken {
-    offset: int,
+    offset: isize,
     breaks: Breaks
 }
 
 #[derive(Clone)]
 pub enum Token {
-    String(String, int),
+    String(String, isize),
     Break(BreakToken),
     Begin(BeginToken),
     End,
@@ -122,25 +122,25 @@ pub fn tok_str(token: &Token) -> String {
 }
 
 pub fn buf_str(toks: &[Token],
-               szs: &[int],
-               left: uint,
-               right: uint,
-               lim: uint)
+               szs: &[isize],
+               left: usize,
+               right: usize,
+               lim: usize)
                -> String {
     let n = toks.len();
     assert_eq!(n, szs.len());
     let mut i = left;
     let mut l = lim;
     let mut s = string::String::from_str("[");
-    while i != right && l != 0u {
-        l -= 1u;
+    while i != right && l != 0us {
+        l -= 1us;
         if i != left {
             s.push_str(", ");
         }
         s.push_str(&format!("{}={}",
                            szs[i],
                            tok_str(&toks[i]))[]);
-        i += 1u;
+        i += 1us;
         i %= n;
     }
     s.push(']');
@@ -155,25 +155,25 @@ pub enum PrintStackBreak {
 
 #[derive(Copy)]
 pub struct PrintStackElem {
-    offset: int,
+    offset: isize,
     pbreak: PrintStackBreak
 }
 
-static SIZE_INFINITY: int = 0xffff;
+static SIZE_INFINITY: isize = 0xffff;
 
-pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: uint) -> Printer {
+pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: usize) -> Printer {
     // Yes 3, it makes the ring buffers big enough to never
     // fall behind.
-    let n: uint = 3 * linewidth;
+    let n: usize = 3 * linewidth;
     debug!("mk_printer {}", linewidth);
     let token: Vec<Token> = repeat(Token::Eof).take(n).collect();
-    let size: Vec<int> = repeat(0i).take(n).collect();
-    let scan_stack: Vec<uint> = repeat(0u).take(n).collect();
+    let size: Vec<isize> = repeat(0is).take(n).collect();
+    let scan_stack: Vec<usize> = repeat(0us).take(n).collect();
     Printer {
         out: out,
         buf_len: n,
-        margin: linewidth as int,
-        space: linewidth as int,
+        margin: linewidth as isize,
+        space: linewidth as isize,
         left: 0,
         right: 0,
         token: token,
@@ -267,40 +267,40 @@ pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: uint) -> Printer {
 /// called 'print'.
 pub struct Printer {
     pub out: Box<io::Writer+'static>,
-    buf_len: uint,
+    buf_len: usize,
     /// Width of lines we're constrained to
-    margin: int,
+    margin: isize,
     /// Number of spaces left on line
-    space: int,
+    space: isize,
     /// Index of left side of input stream
-    left: uint,
+    left: usize,
     /// Index of right side of input stream
-    right: uint,
+    right: usize,
     /// Ring-buffer stream goes through
     token: Vec<Token> ,
     /// Ring-buffer of calculated sizes
-    size: Vec<int> ,
+    size: Vec<isize> ,
     /// Running size of stream "...left"
-    left_total: int,
+    left_total: isize,
     /// Running size of stream "...right"
-    right_total: int,
+    right_total: isize,
     /// Pseudo-stack, really a ring too. Holds the
     /// primary-ring-buffers index of the Begin that started the
     /// current block, possibly with the most recent Break after that
     /// Begin (if there is any) on top of it. Stuff is flushed off the
     /// bottom as it becomes irrelevant due to the primary ring-buffer
     /// advancing.
-    scan_stack: Vec<uint> ,
+    scan_stack: Vec<usize> ,
     /// Top==bottom disambiguator
     scan_stack_empty: bool,
     /// Index of top of scan_stack
-    top: uint,
+    top: usize,
     /// Index of bottom of scan_stack
-    bottom: uint,
+    bottom: usize,
     /// Stack of blocks-in-progress being flushed by print
     print_stack: Vec<PrintStackElem> ,
     /// Buffered indentation to avoid writing trailing whitespace
-    pending_indentation: int,
+    pending_indentation: isize,
 }
 
 impl Printer {
@@ -326,8 +326,8 @@ impl Printer {
             if self.scan_stack_empty {
                 self.left_total = 1;
                 self.right_total = 1;
-                self.left = 0u;
-                self.right = 0u;
+                self.left = 0us;
+                self.right = 0us;
             } else { self.advance_right(); }
             debug!("pp Begin({})/buffer ~[{},{}]",
                    b.offset, self.left, self.right);
@@ -355,8 +355,8 @@ impl Printer {
             if self.scan_stack_empty {
                 self.left_total = 1;
                 self.right_total = 1;
-                self.left = 0u;
-                self.right = 0u;
+                self.left = 0us;
+                self.right = 0us;
             } else { self.advance_right(); }
             debug!("pp Break({})/buffer ~[{},{}]",
                    b.offset, self.left, self.right);
@@ -405,43 +405,43 @@ impl Printer {
         }
         Ok(())
     }
-    pub fn scan_push(&mut self, x: uint) {
+    pub fn scan_push(&mut self, x: usize) {
         debug!("scan_push {}", x);
         if self.scan_stack_empty {
             self.scan_stack_empty = false;
         } else {
-            self.top += 1u;
+            self.top += 1us;
             self.top %= self.buf_len;
             assert!((self.top != self.bottom));
         }
         self.scan_stack[self.top] = x;
     }
-    pub fn scan_pop(&mut self) -> uint {
+    pub fn scan_pop(&mut self) -> usize {
         assert!((!self.scan_stack_empty));
         let x = self.scan_stack[self.top];
         if self.top == self.bottom {
             self.scan_stack_empty = true;
         } else {
-            self.top += self.buf_len - 1u; self.top %= self.buf_len;
+            self.top += self.buf_len - 1us; self.top %= self.buf_len;
         }
         return x;
     }
-    pub fn scan_top(&mut self) -> uint {
+    pub fn scan_top(&mut self) -> usize {
         assert!((!self.scan_stack_empty));
         return self.scan_stack[self.top];
     }
-    pub fn scan_pop_bottom(&mut self) -> uint {
+    pub fn scan_pop_bottom(&mut self) -> usize {
         assert!((!self.scan_stack_empty));
         let x = self.scan_stack[self.bottom];
         if self.top == self.bottom {
             self.scan_stack_empty = true;
         } else {
-            self.bottom += 1u; self.bottom %= self.buf_len;
+            self.bottom += 1us; self.bottom %= self.buf_len;
         }
         return x;
     }
     pub fn advance_right(&mut self) {
-        self.right += 1u;
+        self.right += 1us;
         self.right %= self.buf_len;
         assert!((self.right != self.left));
     }
@@ -471,7 +471,7 @@ impl Printer {
                 break;
             }
 
-            self.left += 1u;
+            self.left += 1us;
             self.left %= self.buf_len;
 
             left_size = self.size[self.left];
@@ -479,7 +479,7 @@ impl Printer {
 
         Ok(())
     }
-    pub fn check_stack(&mut self, k: int) {
+    pub fn check_stack(&mut self, k: isize) {
         if !self.scan_stack_empty {
             let x = self.scan_top();
             match self.token[x] {
@@ -506,21 +506,21 @@ impl Printer {
             }
         }
     }
-    pub fn print_newline(&mut self, amount: int) -> io::IoResult<()> {
+    pub fn print_newline(&mut self, amount: isize) -> io::IoResult<()> {
         debug!("NEWLINE {}", amount);
         let ret = write!(self.out, "\n");
         self.pending_indentation = 0;
         self.indent(amount);
         return ret;
     }
-    pub fn indent(&mut self, amount: int) {
+    pub fn indent(&mut self, amount: isize) {
         debug!("INDENT {}", amount);
         self.pending_indentation += amount;
     }
     pub fn get_top(&mut self) -> PrintStackElem {
         let print_stack = &mut self.print_stack;
         let n = print_stack.len();
-        if n != 0u {
+        if n != 0us {
             (*print_stack)[n - 1]
         } else {
             PrintStackElem {
@@ -536,7 +536,7 @@ impl Printer {
         }
         write!(self.out, "{}", s)
     }
-    pub fn print(&mut self, token: Token, l: int) -> io::IoResult<()> {
+    pub fn print(&mut self, token: Token, l: isize) -> io::IoResult<()> {
         debug!("print {} {} (remaining line space={})", tok_str(&token), l,
                self.space);
         debug!("{}", buf_str(&self.token[],
@@ -565,7 +565,7 @@ impl Printer {
           Token::End => {
             debug!("print End -> pop End");
             let print_stack = &mut self.print_stack;
-            assert!((print_stack.len() != 0u));
+            assert!((print_stack.len() != 0us));
             print_stack.pop().unwrap();
             Ok(())
           }
@@ -620,25 +620,25 @@ impl Printer {
 // Convenience functions to talk to the printer.
 //
 // "raw box"
-pub fn rbox(p: &mut Printer, indent: uint, b: Breaks) -> io::IoResult<()> {
+pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> io::IoResult<()> {
     p.pretty_print(Token::Begin(BeginToken {
-        offset: indent as int,
+        offset: indent as isize,
         breaks: b
     }))
 }
 
-pub fn ibox(p: &mut Printer, indent: uint) -> io::IoResult<()> {
+pub fn ibox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
     rbox(p, indent, Breaks::Inconsistent)
 }
 
-pub fn cbox(p: &mut Printer, indent: uint) -> io::IoResult<()> {
+pub fn cbox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
     rbox(p, indent, Breaks::Consistent)
 }
 
-pub fn break_offset(p: &mut Printer, n: uint, off: int) -> io::IoResult<()> {
+pub fn break_offset(p: &mut Printer, n: usize, off: isize) -> io::IoResult<()> {
     p.pretty_print(Token::Break(BreakToken {
         offset: off,
-        blank_space: n as int
+        blank_space: n as isize
     }))
 }
 
@@ -651,7 +651,7 @@ pub fn eof(p: &mut Printer) -> io::IoResult<()> {
 }
 
 pub fn word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
-    p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as int))
+    p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as isize))
 }
 
 pub fn huge_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
@@ -662,23 +662,23 @@ pub fn zero_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
     p.pretty_print(Token::String(/* bad */ wrd.to_string(), 0))
 }
 
-pub fn spaces(p: &mut Printer, n: uint) -> io::IoResult<()> {
+pub fn spaces(p: &mut Printer, n: usize) -> io::IoResult<()> {
     break_offset(p, n, 0)
 }
 
 pub fn zerobreak(p: &mut Printer) -> io::IoResult<()> {
-    spaces(p, 0u)
+    spaces(p, 0us)
 }
 
 pub fn space(p: &mut Printer) -> io::IoResult<()> {
-    spaces(p, 1u)
+    spaces(p, 1us)
 }
 
 pub fn hardbreak(p: &mut Printer) -> io::IoResult<()> {
-    spaces(p, SIZE_INFINITY as uint)
+    spaces(p, SIZE_INFINITY as usize)
 }
 
-pub fn hardbreak_tok_offset(off: int) -> Token {
+pub fn hardbreak_tok_offset(off: isize) -> Token {
     Token::Break(BreakToken {offset: off, blank_space: SIZE_INFINITY})
 }
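
For context (not part of this patch): the split the module comment describes, usize for ring-buffer indices and isize for sizes and offsets, shows up directly in code like scan_pop above, which steps an index backwards by adding buf_len - 1 and taking the remainder so the unsigned value never underflows, while size totals and indentation offsets stay signed because a pending-calculation balance can legitimately go negative. A standalone sketch of that index arithmetic (names invented here):

    /// Step a ring-buffer index backwards without unsigned underflow.
    fn ring_decrement(i: usize, len: usize) -> usize {
        (i + len - 1) % len
    }

    fn main() {
        assert_eq!(ring_decrement(0, 8), 7); // wraps instead of underflowing
        assert_eq!(ring_decrement(3, 8), 2);
    }
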
 
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index b59e770c6ba..4010f433865 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -54,8 +54,8 @@ impl PpAnn for NoAnn {}
 
 #[derive(Copy)]
 pub struct CurrentCommentAndLiteral {
-    cur_cmnt: uint,
-    cur_lit: uint,
+    cur_cmnt: usize,
+    cur_lit: usize,
 }
 
 pub struct State<'a> {
@@ -92,10 +92,10 @@ pub fn rust_printer_annotated<'a>(writer: Box<io::Writer+'static>,
 }
 
 #[allow(non_upper_case_globals)]
-pub const indent_unit: uint = 4u;
+pub const indent_unit: usize = 4us;
 
 #[allow(non_upper_case_globals)]
-pub const default_columns: uint = 78u;
+pub const default_columns: usize = 78us;
 
 /// Requires you to pass an input filename and reader so that
 /// it can scan the input text for comments and literals to
@@ -381,7 +381,7 @@ pub fn block_to_string(blk: &ast::Block) -> String {
         // containing cbox, will be closed by print-block at }
         try!(s.cbox(indent_unit));
         // head-ibox, will be closed by print-block after {
-        try!(s.ibox(0u));
+        try!(s.ibox(0us));
         s.print_block(blk)
     })
 }
@@ -459,7 +459,7 @@ fn needs_parentheses(expr: &ast::Expr) -> bool {
 }
 
 impl<'a> State<'a> {
-    pub fn ibox(&mut self, u: uint) -> IoResult<()> {
+    pub fn ibox(&mut self, u: usize) -> IoResult<()> {
         self.boxes.push(pp::Breaks::Inconsistent);
         pp::ibox(&mut self.s, u)
     }
@@ -469,13 +469,13 @@ impl<'a> State<'a> {
         pp::end(&mut self.s)
     }
 
-    pub fn cbox(&mut self, u: uint) -> IoResult<()> {
+    pub fn cbox(&mut self, u: usize) -> IoResult<()> {
         self.boxes.push(pp::Breaks::Consistent);
         pp::cbox(&mut self.s, u)
     }
 
     // "raw box"
-    pub fn rbox(&mut self, u: uint, b: pp::Breaks) -> IoResult<()> {
+    pub fn rbox(&mut self, u: usize, b: pp::Breaks) -> IoResult<()> {
         self.boxes.push(b);
         pp::rbox(&mut self.s, u, b)
     }
@@ -514,13 +514,13 @@ impl<'a> State<'a> {
     }
 
     pub fn bclose_(&mut self, span: codemap::Span,
-                   indented: uint) -> IoResult<()> {
+                   indented: usize) -> IoResult<()> {
         self.bclose_maybe_open(span, indented, true)
     }
     pub fn bclose_maybe_open (&mut self, span: codemap::Span,
-                              indented: uint, close_box: bool) -> IoResult<()> {
+                              indented: usize, close_box: bool) -> IoResult<()> {
         try!(self.maybe_print_comment(span.hi));
-        try!(self.break_offset_if_not_bol(1u, -(indented as int)));
+        try!(self.break_offset_if_not_bol(1us, -(indented as isize)));
         try!(word(&mut self.s, "}"));
         if close_box {
             try!(self.end()); // close the outer-box
@@ -567,8 +567,8 @@ impl<'a> State<'a> {
         if !self.is_bol() { try!(space(&mut self.s)); }
         Ok(())
     }
-    pub fn break_offset_if_not_bol(&mut self, n: uint,
-                                   off: int) -> IoResult<()> {
+    pub fn break_offset_if_not_bol(&mut self, n: usize,
+                                   off: isize) -> IoResult<()> {
         if !self.is_bol() {
             break_offset(&mut self.s, n, off)
         } else {
@@ -595,7 +595,7 @@ impl<'a> State<'a> {
     pub fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> IoResult<()> where
         F: FnMut(&mut State, &T) -> IoResult<()>,
     {
-        try!(self.rbox(0u, b));
+        try!(self.rbox(0us, b));
         let mut first = true;
         for elt in elts.iter() {
             if first { first = false; } else { try!(self.word_space(",")); }
@@ -613,13 +613,13 @@ impl<'a> State<'a> {
         F: FnMut(&mut State, &T) -> IoResult<()>,
         G: FnMut(&T) -> codemap::Span,
     {
-        try!(self.rbox(0u, b));
+        try!(self.rbox(0us, b));
         let len = elts.len();
-        let mut i = 0u;
+        let mut i = 0us;
         for elt in elts.iter() {
             try!(self.maybe_print_comment(get_span(elt).hi));
             try!(op(self, elt));
-            i += 1u;
+            i += 1us;
             if i < len {
                 try!(word(&mut self.s, ","));
                 try!(self.maybe_print_trailing_comment(get_span(elt),
@@ -670,7 +670,7 @@ impl<'a> State<'a> {
 
     pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> {
         try!(self.maybe_print_comment(ty.span.lo));
-        try!(self.ibox(0u));
+        try!(self.ibox(0us));
         match ty.node {
             ast::TyVec(ref ty) => {
                 try!(word(&mut self.s, "["));
@@ -871,7 +871,7 @@ impl<'a> State<'a> {
             }
             ast::ItemTy(ref ty, ref params) => {
                 try!(self.ibox(indent_unit));
-                try!(self.ibox(0u));
+                try!(self.ibox(0us));
                 try!(self.word_nbsp(&visibility_qualified(item.vis, "type")[]));
                 try!(self.print_ident(item.ident));
                 try!(self.print_generics(params));
@@ -1262,7 +1262,7 @@ impl<'a> State<'a> {
 
     pub fn print_outer_attributes(&mut self,
                                   attrs: &[ast::Attribute]) -> IoResult<()> {
-        let mut count = 0u;
+        let mut count = 0us;
         for attr in attrs.iter() {
             match attr.node.style {
                 ast::AttrOuter => {
@@ -1280,7 +1280,7 @@ impl<'a> State<'a> {
 
     pub fn print_inner_attributes(&mut self,
                                   attrs: &[ast::Attribute]) -> IoResult<()> {
-        let mut count = 0u;
+        let mut count = 0us;
         for attr in attrs.iter() {
             match attr.node.style {
                 ast::AttrInner => {
@@ -1355,7 +1355,7 @@ impl<'a> State<'a> {
     }
 
     pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block,
-                                       indented: uint) -> IoResult<()> {
+                                       indented: usize) -> IoResult<()> {
         self.print_block_maybe_unclosed(blk, indented, &[], false)
     }
 
@@ -1367,7 +1367,7 @@ impl<'a> State<'a> {
 
     pub fn print_block_maybe_unclosed(&mut self,
                                       blk: &ast::Block,
-                                      indented: uint,
+                                      indented: usize,
                                       attrs: &[ast::Attribute],
                                       close_box: bool) -> IoResult<()> {
         match blk.rules {
@@ -1404,8 +1404,8 @@ impl<'a> State<'a> {
                 match _else.node {
                     // "another else-if"
                     ast::ExprIf(ref i, ref then, ref e) => {
-                        try!(self.cbox(indent_unit - 1u));
-                        try!(self.ibox(0u));
+                        try!(self.cbox(indent_unit - 1us));
+                        try!(self.ibox(0us));
                         try!(word(&mut self.s, " else if "));
                         try!(self.print_expr(&**i));
                         try!(space(&mut self.s));
@@ -1414,8 +1414,8 @@ impl<'a> State<'a> {
                     }
                     // "another else-if-let"
                     ast::ExprIfLet(ref pat, ref expr, ref then, ref e) => {
-                        try!(self.cbox(indent_unit - 1u));
-                        try!(self.ibox(0u));
+                        try!(self.cbox(indent_unit - 1us));
+                        try!(self.ibox(0us));
                         try!(word(&mut self.s, " else if let "));
                         try!(self.print_pat(&**pat));
                         try!(space(&mut self.s));
@@ -1427,8 +1427,8 @@ impl<'a> State<'a> {
                     }
                     // "final else"
                     ast::ExprBlock(ref b) => {
-                        try!(self.cbox(indent_unit - 1u));
-                        try!(self.ibox(0u));
+                        try!(self.cbox(indent_unit - 1us));
+                        try!(self.ibox(0us));
                         try!(word(&mut self.s, " else "));
                         self.print_block(&**b)
                     }
@@ -1594,7 +1594,7 @@ impl<'a> State<'a> {
         try!(self.print_expr(&*args[0]));
         try!(word(&mut self.s, "."));
         try!(self.print_ident(ident.node));
-        if tys.len() > 0u {
+        if tys.len() > 0us {
             try!(word(&mut self.s, "::<"));
             try!(self.commasep(Inconsistent, tys,
                                |s, ty| s.print_type(&**ty)));
@@ -1765,7 +1765,7 @@ impl<'a> State<'a> {
                 // containing cbox, will be closed by print-block at }
                 try!(self.cbox(indent_unit));
                 // head-box, will be closed by print-block after {
-                try!(self.ibox(0u));
+                try!(self.ibox(0us));
                 try!(self.print_block(&**blk));
             }
             ast::ExprAssign(ref lhs, ref rhs) => {
@@ -1789,7 +1789,7 @@ impl<'a> State<'a> {
             ast::ExprTupField(ref expr, id) => {
                 try!(self.print_expr(&**expr));
                 try!(word(&mut self.s, "."));
-                try!(self.print_uint(id.node));
+                try!(self.print_usize(id.node));
             }
             ast::ExprIndex(ref expr, ref index) => {
                 try!(self.print_expr(&**expr));
@@ -1951,7 +1951,7 @@ impl<'a> State<'a> {
         self.ann.post(self, NodeIdent(&ident))
     }
 
-    pub fn print_uint(&mut self, i: uint) -> IoResult<()> {
+    pub fn print_usize(&mut self, i: usize) -> IoResult<()> {
         word(&mut self.s, &i.to_string()[])
     }
 
@@ -2142,7 +2142,7 @@ impl<'a> State<'a> {
                     },
                     |f| f.node.pat.span));
                 if etc {
-                    if fields.len() != 0u { try!(self.word_space(",")); }
+                    if fields.len() != 0us { try!(self.word_space(",")); }
                     try!(word(&mut self.s, ".."));
                 }
                 try!(space(&mut self.s));
@@ -2209,7 +2209,7 @@ impl<'a> State<'a> {
             try!(space(&mut self.s));
         }
         try!(self.cbox(indent_unit));
-        try!(self.ibox(0u));
+        try!(self.ibox(0us));
         try!(self.print_outer_attributes(&arm.attrs[]));
         let mut first = true;
         for p in arm.pats.iter() {
@@ -2295,7 +2295,7 @@ impl<'a> State<'a> {
         -> IoResult<()> {
         // It is unfortunate to duplicate the commasep logic, but we want the
         // self type and the args all in the same box.
-        try!(self.rbox(0u, Inconsistent));
+        try!(self.rbox(0us, Inconsistent));
         let mut first = true;
         for &explicit_self in opt_explicit_self.iter() {
             let m = match explicit_self {
@@ -2470,7 +2470,7 @@ impl<'a> State<'a> {
         try!(word(&mut self.s, "<"));
 
         let mut ints = Vec::new();
-        for i in range(0u, total) {
+        for i in range(0us, total) {
             ints.push(i);
         }
 
@@ -2788,7 +2788,7 @@ impl<'a> State<'a> {
                 if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
                     span_line.line == comment_line.line {
                         try!(self.print_comment(cmnt));
-                        self.cur_cmnt_and_lit.cur_cmnt += 1u;
+                        self.cur_cmnt_and_lit.cur_cmnt += 1us;
                     }
             }
             _ => ()
@@ -2806,7 +2806,7 @@ impl<'a> State<'a> {
             match self.next_comment() {
                 Some(ref cmnt) => {
                     try!(self.print_comment(cmnt));
-                    self.cur_cmnt_and_lit.cur_cmnt += 1u;
+                    self.cur_cmnt_and_lit.cur_cmnt += 1us;
                 }
                 _ => break
             }
@@ -2888,7 +2888,7 @@ impl<'a> State<'a> {
                 while self.cur_cmnt_and_lit.cur_lit < lits.len() {
                     let ltrl = (*lits)[self.cur_cmnt_and_lit.cur_lit].clone();
                     if ltrl.pos > pos { return None; }
-                    self.cur_cmnt_and_lit.cur_lit += 1u;
+                    self.cur_cmnt_and_lit.cur_lit += 1us;
                     if ltrl.pos == pos { return Some(ltrl); }
                 }
                 None
@@ -2903,7 +2903,7 @@ impl<'a> State<'a> {
                 Some(ref cmnt) => {
                     if (*cmnt).pos < pos {
                         try!(self.print_comment(cmnt));
-                        self.cur_cmnt_and_lit.cur_cmnt += 1u;
+                        self.cur_cmnt_and_lit.cur_cmnt += 1us;
                     } else { break; }
                 }
                 _ => break
@@ -2916,7 +2916,7 @@ impl<'a> State<'a> {
                          cmnt: &comments::Comment) -> IoResult<()> {
         match cmnt.style {
             comments::Mixed => {
-                assert_eq!(cmnt.lines.len(), 1u);
+                assert_eq!(cmnt.lines.len(), 1us);
                 try!(zerobreak(&mut self.s));
                 try!(word(&mut self.s, &cmnt.lines[0][]));
                 zerobreak(&mut self.s)
@@ -2935,11 +2935,11 @@ impl<'a> State<'a> {
             }
             comments::Trailing => {
                 try!(word(&mut self.s, " "));
-                if cmnt.lines.len() == 1u {
+                if cmnt.lines.len() == 1us {
                     try!(word(&mut self.s, &cmnt.lines[0][]));
                     hardbreak(&mut self.s)
                 } else {
-                    try!(self.ibox(0u));
+                    try!(self.ibox(0us));
                     for line in cmnt.lines.iter() {
                         if !line.is_empty() {
                             try!(word(&mut self.s, &line[]));
@@ -3047,7 +3047,7 @@ impl<'a> State<'a> {
     }
 }
 
-fn repeat(s: &str, n: uint) -> String { iter::repeat(s).take(n).collect() }
+fn repeat(s: &str, n: usize) -> String { iter::repeat(s).take(n).collect() }
 
 #[cfg(test)]
 mod test {
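
For context (not part of this patch): bclose_maybe_open above converts the indent back to a signed value because the break before a closing `}` uses a negative offset, dedenting the brace so it lines up with the construct it closes. A tiny sketch of that sign flip (the constant name is invented here):

    const INDENT_UNIT: usize = 4; // same value as pprust's indent_unit

    /// Offset passed with the break that precedes a closing brace.
    fn closing_brace_offset(indented: usize) -> isize {
        -(indented as isize)
    }

    fn main() {
        assert_eq!(closing_brace_offset(INDENT_UNIT), -4);
    }
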
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 5f869d5093f..3022b2b41b7 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -339,8 +339,8 @@ fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
                 let tparm_cnt = generics.ty_params.len();
                 // NB: inadequate check, but we're running
                 // well before resolve, can't get too deep.
-                input_cnt == 1u
-                    && no_output && tparm_cnt == 0u
+                input_cnt == 1us
+                    && no_output && tparm_cnt == 0us
             }
           _ => false
         }
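
For context (not part of this patch): the counts checked above describe the expected shape of a benchmark function, one argument, no return type, no type parameters. A sketch of a function with that shape, using a stand-in Bencher type rather than the real libtest one:

    struct Bencher;

    /// One input, no output, zero type parameters:
    /// input_cnt == 1 && no_output && tparm_cnt == 0
    fn bench_shape(_b: &mut Bencher) {
        // benchmark body would go here
    }

    fn main() {
        let mut b = Bencher;
        bench_shape(&mut b);
    }
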
diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs
index 5dca39f1aea..66b225f30dd 100644
--- a/src/libsyntax/util/interner.rs
+++ b/src/libsyntax/util/interner.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! An "interner" is a data structure that associates values with uint tags and
+//! An "interner" is a data structure that associates values with usize tags and
 //! allows bidirectional lookup; i.e. given a value, one can easily find the
 //! type, and vice versa.
 
@@ -70,10 +70,10 @@ impl<T: Eq + Hash<Hasher> + Clone + 'static> Interner<T> {
 
     pub fn get(&self, idx: Name) -> T {
         let vect = self.vect.borrow();
-        (*vect)[idx.uint()].clone()
+        (*vect)[idx.usize()].clone()
     }
 
-    pub fn len(&self) -> uint {
+    pub fn len(&self) -> usize {
         let vect = self.vect.borrow();
         (*vect).len()
     }
@@ -190,16 +190,16 @@ impl StrInterner {
         let new_idx = Name(self.len() as u32);
         // leave out of map to avoid colliding
         let mut vect = self.vect.borrow_mut();
-        let existing = (*vect)[idx.uint()].clone();
+        let existing = (*vect)[idx.usize()].clone();
         vect.push(existing);
         new_idx
     }
 
     pub fn get(&self, idx: Name) -> RcStr {
-        (*self.vect.borrow())[idx.uint()].clone()
+        (*self.vect.borrow())[idx.usize()].clone()
     }
 
-    pub fn len(&self) -> uint {
+    pub fn len(&self) -> usize {
         self.vect.borrow().len()
     }
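
For context (not part of this patch): a minimal interner in the spirit of this module, where each distinct string gets a usize tag and lookup works in both directions. None of these names exist in libsyntax:

    use std::collections::HashMap;

    #[derive(Default)]
    struct MiniInterner {
        map: HashMap<String, usize>,
        vect: Vec<String>,
    }

    impl MiniInterner {
        /// Return the existing tag for `s`, or assign the next one.
        fn intern(&mut self, s: &str) -> usize {
            if let Some(&idx) = self.map.get(s) {
                return idx;
            }
            let idx = self.vect.len();
            self.map.insert(s.to_string(), idx);
            self.vect.push(s.to_string());
            idx
        }

        /// Look a tag back up to its string.
        fn get(&self, idx: usize) -> &str {
            &self.vect[idx]
        }

        fn len(&self) -> usize {
            self.vect.len()
        }
    }

    fn main() {
        let mut i = MiniInterner::default();
        let a = i.intern("foo");
        let b = i.intern("foo");
        assert_eq!(a, b);            // same string, same tag
        assert_eq!(i.get(a), "foo"); // and back again
        assert_eq!(i.len(), 1);
    }
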
 
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index 83bbff8473d..5466b7337e7 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -130,10 +130,10 @@ pub fn matches_codepattern(a : &str, b : &str) -> bool {
     }
 }
 
-/// Given a string and an index, return the first uint >= idx
+/// Given a string and an index, return the first usize >= idx
 /// that is a non-ws-char or is outside of the legal range of
 /// the string.
-fn scan_for_non_ws_or_end(a : &str, idx: uint) -> uint {
+fn scan_for_non_ws_or_end(a : &str, idx: usize) -> usize {
     let mut i = idx;
     let len = a.len();
     while (i < len) && (is_whitespace(a.char_at(i))) {
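
For context (not part of this patch): a self-contained version of the scan described in the doc comment above, returning the first index at or after `idx` that holds a non-whitespace character, or the string length if only whitespace remains:

    fn scan_for_non_ws_or_end(a: &str, idx: usize) -> usize {
        a[idx..]
            .char_indices()
            .find(|&(_, c)| !c.is_whitespace())
            .map(|(i, _)| idx + i)
            .unwrap_or(a.len())
    }

    fn main() {
        assert_eq!(scan_for_non_ws_or_end("a  bc", 1), 3);
        assert_eq!(scan_for_non_ws_or_end("a   ", 1), 4); // only whitespace left
    }
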
diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs
index b68c9926391..d8bac19805b 100644
--- a/src/libsyntax/util/small_vector.rs
+++ b/src/libsyntax/util/small_vector.rs
@@ -89,7 +89,7 @@ impl<T> SmallVector<T> {
         }
     }
 
-    pub fn get<'a>(&'a self, idx: uint) -> &'a T {
+    pub fn get<'a>(&'a self, idx: usize) -> &'a T {
         match self.repr {
             One(ref v) if idx == 0 => v,
             Many(ref vs) => &vs[idx],
@@ -126,7 +126,7 @@ impl<T> SmallVector<T> {
         IntoIter { repr: repr }
     }
 
-    pub fn len(&self) -> uint {
+    pub fn len(&self) -> usize {
         match self.repr {
             Zero => 0,
             One(..) => 1,
@@ -165,7 +165,7 @@ impl<T> Iterator for IntoIter<T> {
         }
     }
 
-    fn size_hint(&self) -> (uint, Option<uint>) {
+    fn size_hint(&self) -> (usize, Option<usize>) {
         match self.repr {
             ZeroIterator => (0, Some(0)),
             OneIterator(..) => (1, Some(1)),
@@ -191,17 +191,17 @@ mod test {
 
     #[test]
     fn test_len() {
-        let v: SmallVector<int> = SmallVector::zero();
+        let v: SmallVector<isize> = SmallVector::zero();
         assert_eq!(0, v.len());
 
-        assert_eq!(1, SmallVector::one(1i).len());
-        assert_eq!(5, SmallVector::many(vec!(1i, 2, 3, 4, 5)).len());
+        assert_eq!(1, SmallVector::one(1is).len());
+        assert_eq!(5, SmallVector::many(vec!(1is, 2, 3, 4, 5)).len());
     }
 
     #[test]
     fn test_push_get() {
         let mut v = SmallVector::zero();
-        v.push(1i);
+        v.push(1is);
         assert_eq!(1, v.len());
         assert_eq!(&1, v.get(0));
         v.push(2);
@@ -214,7 +214,7 @@ mod test {
 
     #[test]
     fn test_from_iter() {
-        let v: SmallVector<int> = (vec!(1i, 2, 3)).into_iter().collect();
+        let v: SmallVector<isize> = (vec![1is, 2, 3]).into_iter().collect();
         assert_eq!(3, v.len());
         assert_eq!(&1, v.get(0));
         assert_eq!(&2, v.get(1));
@@ -224,31 +224,31 @@ mod test {
     #[test]
     fn test_move_iter() {
         let v = SmallVector::zero();
-        let v: Vec<int> = v.into_iter().collect();
+        let v: Vec<isize> = v.into_iter().collect();
         assert_eq!(Vec::new(), v);
 
-        let v = SmallVector::one(1i);
-        assert_eq!(vec!(1i), v.into_iter().collect::<Vec<_>>());
+        let v = SmallVector::one(1is);
+        assert_eq!(vec!(1is), v.into_iter().collect::<Vec<_>>());
 
-        let v = SmallVector::many(vec!(1i, 2i, 3i));
-        assert_eq!(vec!(1i, 2i, 3i), v.into_iter().collect::<Vec<_>>());
+        let v = SmallVector::many(vec!(1is, 2is, 3is));
+        assert_eq!(vec!(1is, 2is, 3is), v.into_iter().collect::<Vec<_>>());
     }
 
     #[test]
     #[should_fail]
     fn test_expect_one_zero() {
-        let _: int = SmallVector::zero().expect_one("");
+        let _: isize = SmallVector::zero().expect_one("");
     }
 
     #[test]
     #[should_fail]
     fn test_expect_one_many() {
-        SmallVector::many(vec!(1i, 2)).expect_one("");
+        SmallVector::many(vec!(1is, 2)).expect_one("");
     }
 
     #[test]
     fn test_expect_one_one() {
-        assert_eq!(1i, SmallVector::one(1i).expect_one(""));
-        assert_eq!(1i, SmallVector::many(vec!(1i)).expect_one(""));
+        assert_eq!(1is, SmallVector::one(1is).expect_one(""));
+        assert_eq!(1is, SmallVector::many(vec!(1is)).expect_one(""));
     }
 }
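
For context (not part of this patch): the representation exercised by these tests keeps zero- and one-element vectors off the heap and only falls back to a Vec for the Many case. A simplified sketch with stand-in names:

    enum Small<T> {
        Zero,
        One(T),
        Many(Vec<T>),
    }

    impl<T> Small<T> {
        fn len(&self) -> usize {
            match self {
                Small::Zero => 0,
                Small::One(_) => 1,
                Small::Many(v) => v.len(),
            }
        }
    }

    fn main() {
        assert_eq!(Small::<isize>::Zero.len(), 0);
        assert_eq!(Small::One(1isize).len(), 1);
        assert_eq!(Small::Many(vec![1isize, 2, 3]).len(), 3);
    }
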
diff --git a/src/test/auxiliary/roman_numerals.rs b/src/test/auxiliary/roman_numerals.rs
index 519f32fc248..a4edc607279 100644
--- a/src/test/auxiliary/roman_numerals.rs
+++ b/src/test/auxiliary/roman_numerals.rs
@@ -20,7 +20,7 @@ use syntax::codemap::Span;
 use syntax::parse::token;
 use syntax::ast::{TokenTree, TtToken};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
-use syntax::ext::build::AstBuilder;  // trait for expr_uint
+use syntax::ext::build::AstBuilder;  // trait for expr_usize
 use rustc::plugin::Registry;
 
 // WARNING WARNING WARNING WARNING WARNING
@@ -61,7 +61,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
         }
     }
 
-    MacExpr::new(cx.expr_uint(sp, total))
+    MacExpr::new(cx.expr_usize(sp, total))
 }
 
 #[plugin_registrar]
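
For context (not part of this patch): the rn!() plugin computes a roman-numeral value at expansion time and splices it into the crate as a literal via cx.expr_usize(sp, total). The sketch below is an ordinary runtime function showing one way to do that accumulation, not the plugin's own parsing code:

    /// Value of a single roman digit; unknown characters count as 0.
    fn val(c: char) -> isize {
        match c {
            'I' => 1, 'V' => 5, 'X' => 10, 'L' => 50,
            'C' => 100, 'D' => 500, 'M' => 1000,
            _ => 0,
        }
    }

    fn roman_to_usize(s: &str) -> usize {
        let vals: Vec<isize> = s.chars().map(val).collect();
        let mut total: isize = 0;
        for (i, &v) in vals.iter().enumerate() {
            // Subtractive notation: a smaller digit before a larger one subtracts.
            if vals.get(i + 1).map_or(false, |&n| n > v) {
                total -= v;
            } else {
                total += v;
            }
        }
        total.max(0) as usize
    }

    fn main() {
        assert_eq!(roman_to_usize("MMXV"), 2015);
        assert_eq!(roman_to_usize("XIV"), 14);
    }
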